Add H265 support to MP4 recording

zqsong 2019-07-30 19:26:38 +08:00
parent fbf0469ef2
commit 360eba2c68
7 changed files with 1773 additions and 65 deletions

View File

@ -36,6 +36,7 @@ set(ENABLE_MYSQL true)
set(ENABLE_MP4V2 true)
set(ENABLE_FAAC true)
set(ENABLE_X264 true)
set(MP4_H265RECORD true)
#
if(ENABLE_HLS)
@ -47,6 +48,12 @@ else()
set(LINK_LIB_LIST zlmediakit zltoolkit)
endif()
if(MP4_H265RECORD)
message(STATUS "MP4_H265RECORD defined")
add_definitions(-DMP4_H265RECORD)
set(MediaServer_Root ${CMAKE_SOURCE_DIR}/3rdpart/media-server)
list(APPEND LINK_LIB_LIST mov flv)
endif()
#openssl
find_package(OpenSSL QUIET)
if (OPENSSL_FOUND AND ENABLE_OPENSSL)
@ -111,6 +118,21 @@ if(ENABLE_HLS)
endif(WIN32)
endif()
if(MP4_H265RECORD)
aux_source_directory(${MediaServer_Root}/libmov/include src_mov)
aux_source_directory(${MediaServer_Root}/libmov/source src_mov)
include_directories(${MediaServer_Root}/libmov/include)
aux_source_directory(${MediaServer_Root}/libflv/include src_flv)
aux_source_directory(${MediaServer_Root}/libflv/source src_flv)
include_directories(${MediaServer_Root}/libflv/include)
add_library(mov STATIC ${src_mov})
add_library(flv STATIC ${src_flv})
if(WIN32)
set_target_properties(mov flv PROPERTIES COMPILE_FLAGS ${VS_FALGS} )
endif(WIN32)
endif()
if (WIN32)
list(APPEND LINK_LIB_LIST WS2_32 Iphlpapi shlwapi)
set_target_properties(zltoolkit PROPERTIES COMPILE_FLAGS ${VS_FALGS} )

View File

@ -76,8 +76,14 @@ Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) {
if (strcasecmp(track->_codec.data(), "h265") == 0) {
//a=fmtp:96 sprop-sps=QgEBAWAAAAMAsAAAAwAAAwBdoAKAgC0WNrkky/AIAAADAAgAAAMBlQg=; sprop-pps=RAHA8vA8kAA=
int pt, id;
char sprop_vps[128] = {0},sprop_sps[128] = {0},sprop_pps[128] = {0};
if (5 == sscanf(track->_fmtp.data(), "%d profile-id=%d; sprop-sps=%127[^;]; sprop-pps=%127[^;]; sprop-vps=%127[^;]", &pt, &id, sprop_sps,sprop_pps, sprop_vps)) {
auto vps = decodeBase64(sprop_vps);
auto sps = decodeBase64(sprop_sps);
auto pps = decodeBase64(sprop_pps);
return std::make_shared<H265Track>(vps,sps,pps,0,0,0);
}
if (4 == sscanf(track->_fmtp.data(), "%d sprop-vps=%127[^;]; sprop-sps=%127[^;]; sprop-pps=%127[^;]", &pt, sprop_vps,sprop_sps, sprop_pps)) {
auto vps = decodeBase64(sprop_vps);
auto sps = decodeBase64(sprop_sps);
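For a concrete sense of what the first sscanf pattern above accepts, here is a minimal, self-contained sketch; the fmtp string and base64 payloads are made up for illustration, only the format string is taken from the code:

#include <cstdio>

// hypothetical fmtp attribute in the "profile-id first" layout
static const char *kFmtp = "96 profile-id=1; sprop-sps=QgEB...; sprop-pps=RAHA...; sprop-vps=QAEM...";

int main() {
    int pt = 0, id = 0;
    char sprop_vps[128] = {0}, sprop_sps[128] = {0}, sprop_pps[128] = {0};
    if (5 == sscanf(kFmtp, "%d profile-id=%d; sprop-sps=%127[^;]; sprop-pps=%127[^;]; sprop-vps=%127[^;]",
                    &pt, &id, sprop_sps, sprop_pps, sprop_vps)) {
        // each sprop-* field would then be base64-decoded into the VPS/SPS/PPS NAL units
        printf("pt=%d profile-id=%d\nsps=%s\npps=%s\nvps=%s\n", pt, id, sprop_sps, sprop_pps, sprop_vps);
    }
    return 0;
}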

File diff suppressed because it is too large

View File

@ -7,6 +7,15 @@
#define QP_MAX_NUM (51 + 6*6) // The maximum supported qp
#define HEVC_MAX_SHORT_TERM_RPS_COUNT 64
#define T_PROFILE_HEVC_MAIN 1
#define T_PROFILE_HEVC_MAIN_10 2
#define T_PROFILE_HEVC_MAIN_STILL_PICTURE 3
#define T_PROFILE_HEVC_REXT 4
/**
* Chromaticity coordinates of the source primaries.
*/
@ -67,6 +76,62 @@ enum T_AVColorSpace {
};
enum {
// 7.4.3.1: vps_max_layers_minus1 is in [0, 62].
HEVC_MAX_LAYERS = 63,
// 7.4.3.1: vps_max_sub_layers_minus1 is in [0, 6].
HEVC_MAX_SUB_LAYERS = 7,
// 7.4.3.1: vps_num_layer_sets_minus1 is in [0, 1023].
HEVC_MAX_LAYER_SETS = 1024,
// 7.4.2.1: vps_video_parameter_set_id is u(4).
HEVC_MAX_VPS_COUNT = 16,
// 7.4.3.2.1: sps_seq_parameter_set_id is in [0, 15].
HEVC_MAX_SPS_COUNT = 16,
// 7.4.3.3.1: pps_pic_parameter_set_id is in [0, 63].
HEVC_MAX_PPS_COUNT = 64,
// A.4.2: MaxDpbSize is bounded above by 16.
HEVC_MAX_DPB_SIZE = 16,
// 7.4.3.1: vps_max_dec_pic_buffering_minus1[i] is in [0, MaxDpbSize - 1].
HEVC_MAX_REFS = HEVC_MAX_DPB_SIZE,
// 7.4.3.2.1: num_short_term_ref_pic_sets is in [0, 64].
HEVC_MAX_SHORT_TERM_REF_PIC_SETS = 64,
// 7.4.3.2.1: num_long_term_ref_pics_sps is in [0, 32].
HEVC_MAX_LONG_TERM_REF_PICS = 32,
// A.3: all profiles require that CtbLog2SizeY is in [4, 6].
HEVC_MIN_LOG2_CTB_SIZE = 4,
HEVC_MAX_LOG2_CTB_SIZE = 6,
// E.3.2: cpb_cnt_minus1[i] is in [0, 31].
HEVC_MAX_CPB_CNT = 32,
// A.4.1: in table A.6 the highest level allows a MaxLumaPs of 35 651 584.
HEVC_MAX_LUMA_PS = 35651584,
// A.4.1: pic_width_in_luma_samples and pic_height_in_luma_samples are
// constrained to be not greater than sqrt(MaxLumaPs * 8). Hence height/
// width are bounded above by sqrt(8 * 35651584) = 16888.2 samples.
HEVC_MAX_WIDTH = 16888,
HEVC_MAX_HEIGHT = 16888,
// A.4.1: table A.6 allows at most 22 tile rows for any level.
HEVC_MAX_TILE_ROWS = 22,
// A.4.1: table A.6 allows at most 20 tile columns for any level.
HEVC_MAX_TILE_COLUMNS = 20,
// 7.4.7.1: in the worst case (tiles_enabled_flag and
// entropy_coding_sync_enabled_flag are both set), entry points can be
// placed at the beginning of every Ctb row in every tile, giving an
// upper bound of (num_tile_columns_minus1 + 1) * PicHeightInCtbsY - 1.
// Only a stream with very high resolution and perverse parameters could
// get near that, though, so set a lower limit here with the maximum
// possible value for 4K video (at most 135 16x16 Ctb rows).
HEVC_MAX_ENTRY_POINT_OFFSETS = HEVC_MAX_TILE_COLUMNS * 135,
};
/**
* rational number numerator/denominator
*/
@ -170,6 +235,189 @@ typedef struct T_PPS {
int iChromaQpDiff;
} T_PPS;
typedef struct T_HEVCWindow {
unsigned int left_offset;
unsigned int right_offset;
unsigned int top_offset;
unsigned int bottom_offset;
} T_HEVCWindow;
typedef struct T_VUI {
T_AVRational sar;
int overscan_info_present_flag;
int overscan_appropriate_flag;
int video_signal_type_present_flag;
int video_format;
int video_full_range_flag;
int colour_description_present_flag;
uint8_t colour_primaries;
uint8_t transfer_characteristic;
uint8_t matrix_coeffs;
int chroma_loc_info_present_flag;
int chroma_sample_loc_type_top_field;
int chroma_sample_loc_type_bottom_field;
int neutra_chroma_indication_flag;
int field_seq_flag;
int frame_field_info_present_flag;
int default_display_window_flag;
T_HEVCWindow def_disp_win;
int vui_timing_info_present_flag;
uint32_t vui_num_units_in_tick;
uint32_t vui_time_scale;
int vui_poc_proportional_to_timing_flag;
int vui_num_ticks_poc_diff_one_minus1;
int vui_hrd_parameters_present_flag;
int bitstream_restriction_flag;
int tiles_fixed_structure_flag;
int motion_vectors_over_pic_boundaries_flag;
int restricted_ref_pic_lists_flag;
int min_spatial_segmentation_idc;
int max_bytes_per_pic_denom;
int max_bits_per_min_cu_denom;
int log2_max_mv_length_horizontal;
int log2_max_mv_length_vertical;
} T_VUI;
typedef struct T_PTLCommon {
uint8_t profile_space;
uint8_t tier_flag;
uint8_t profile_idc;
uint8_t profile_compatibility_flag[32];
uint8_t level_idc;
uint8_t progressive_source_flag;
uint8_t interlaced_source_flag;
uint8_t non_packed_constraint_flag;
uint8_t frame_only_constraint_flag;
} T_PTLCommon;
typedef struct T_PTL {
T_PTLCommon general_ptl;
T_PTLCommon sub_layer_ptl[HEVC_MAX_SUB_LAYERS];
uint8_t sub_layer_profile_present_flag[HEVC_MAX_SUB_LAYERS];
uint8_t sub_layer_level_present_flag[HEVC_MAX_SUB_LAYERS];
} T_PTL;
typedef struct T_ScalingList {
/* This is a little wasteful, since sizeID 0 only needs 8 coeffs,
* and size ID 3 only has 2 arrays, not 6. */
uint8_t sl[4][6][64];
uint8_t sl_dc[2][6];
} T_ScalingList;
typedef struct T_ShortTermRPS {
unsigned int num_negative_pics;
int num_delta_pocs;
int rps_idx_num_delta_pocs;
int32_t delta_poc[32];
uint8_t used[32];
} T_ShortTermRPS;
typedef struct T_HEVCSPS {
unsigned vps_id;
int chroma_format_idc;
uint8_t separate_colour_plane_flag;
///< output (i.e. cropped) values
int output_width, output_height;
T_HEVCWindow output_window;
T_HEVCWindow pic_conf_win;
int bit_depth;
int bit_depth_chroma;
int pixel_shift;
// enum AVPixelFormat pix_fmt;
unsigned int log2_max_poc_lsb;
int pcm_enabled_flag;
int max_sub_layers;
struct {
int max_dec_pic_buffering;
int num_reorder_pics;
int max_latency_increase;
} temporal_layer[HEVC_MAX_SUB_LAYERS];
uint8_t temporal_id_nesting_flag;
T_VUI vui;
T_PTL ptl;
uint8_t scaling_list_enable_flag;
T_ScalingList scaling_list;
unsigned int nb_st_rps;
T_ShortTermRPS st_rps[HEVC_MAX_SHORT_TERM_RPS_COUNT];
uint8_t amp_enabled_flag;
uint8_t sao_enabled;
uint8_t long_term_ref_pics_present_flag;
uint16_t lt_ref_pic_poc_lsb_sps[32];
uint8_t used_by_curr_pic_lt_sps_flag[32];
uint8_t num_long_term_ref_pics_sps;
struct {
uint8_t bit_depth;
uint8_t bit_depth_chroma;
unsigned int log2_min_pcm_cb_size;
unsigned int log2_max_pcm_cb_size;
uint8_t loop_filter_disable_flag;
} pcm;
uint8_t sps_temporal_mvp_enabled_flag;
uint8_t sps_strong_intra_smoothing_enable_flag;
unsigned int log2_min_cb_size;
unsigned int log2_diff_max_min_coding_block_size;
unsigned int log2_min_tb_size;
unsigned int log2_max_trafo_size;
unsigned int log2_ctb_size;
unsigned int log2_min_pu_size;
int max_transform_hierarchy_depth_inter;
int max_transform_hierarchy_depth_intra;
int transform_skip_rotation_enabled_flag;
int transform_skip_context_enabled_flag;
int implicit_rdpcm_enabled_flag;
int explicit_rdpcm_enabled_flag;
int intra_smoothing_disabled_flag;
int persistent_rice_adaptation_enabled_flag;
///< coded frame dimension in various units
int width;
int height;
int ctb_width;
int ctb_height;
int ctb_size;
int min_cb_width;
int min_cb_height;
int min_tb_width;
int min_tb_height;
int min_pu_width;
int min_pu_height;
int tb_mask;
int hshift[3];
int vshift[3];
int qp_bd_offset;
uint8_t data[4096];
int data_size;
}T_HEVCSPS;
typedef struct T_GetBitContext{
uint8_t *pu8Buf;   /*pointer to the start of the SPS*/
int iBufSize;      /*length of the SPS*/
@ -180,8 +428,13 @@ typedef struct T_GetBitContext{
int h264DecSeqParameterSet(void *pvBuf, T_SPS *ptSps);
int h265DecSeqParameterSet( void *pvBufSrc, T_HEVCSPS *p_sps );
void h264GetWidthHeight(T_SPS *ptSps, int *piWidth, int *piHeight);
void h265GetWidthHeight(T_HEVCSPS *ptSps, int *piWidth, int *piHeight);
void h264GeFramerate(T_SPS *ptSps, float *pfFramerate);
void h265GeFramerate(T_HEVCSPS *ptSps, float *pfFramerate);
#if defined (__cplusplus)
}
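A hedged sketch of how the new HEVC entry points declared above might be used; the SPSParser.h header name and the negative-return-on-error convention are assumptions, not facts from this commit:

#include <cstdio>
#include "SPSParser.h"   // assumed header name for the declarations above

// spsNal: a raw HEVC SPS NAL unit, in whatever form h265DecSeqParameterSet expects
static void dumpHevcSpsInfo(void *spsNal) {
    T_HEVCSPS sps;
    if (h265DecSeqParameterSet(spsNal, &sps) < 0) {   // assumed: negative return on parse failure
        printf("failed to parse HEVC SPS\n");
        return;
    }
    int width = 0, height = 0;
    float fps = 0.0f;
    h265GetWidthHeight(&sps, &width, &height);
    h265GeFramerate(&sps, &fps);
    printf("HEVC SPS: %dx%d @ %.2f fps\n", width, height, fps);
}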

View File

@ -35,9 +35,57 @@
#include "Util/util.h"
#include "Util/NoticeCenter.h"
#include "Extension/H264.h"
#include "Extension/H265.h"
#include "Extension/AAC.h"
#include "Thread/WorkThreadPool.h"
#ifdef MP4_H265RECORD
#include "mov-buffer.h"
#include "mov-format.h"
#if defined(_WIN32) || defined(_WIN64)
#define fseek64 _fseeki64
#define ftell64 _ftelli64
#else
#define fseek64 fseek
#define ftell64 ftell
#endif
static int mov_file_read(void* fp, void* data, uint64_t bytes)
{
if (bytes == fread(data, 1, bytes, (FILE*)fp))
return 0;
return 0 != ferror((FILE*)fp) ? ferror((FILE*)fp) : -1 /*EOF*/;
}
static int mov_file_write(void* fp, const void* data, uint64_t bytes)
{
return bytes == fwrite(data, 1, bytes, (FILE*)fp) ? 0 : ferror((FILE*)fp);
}
static int mov_file_seek(void* fp, uint64_t offset)
{
return fseek64((FILE*)fp, offset, SEEK_SET);
}
static uint64_t mov_file_tell(void* fp)
{
return ftell64((FILE*)fp);
}
const struct mov_buffer_t* mov_file_buffer(void)
{
static struct mov_buffer_t s_io = {
mov_file_read,
mov_file_write,
mov_file_seek,
mov_file_tell,
};
return &s_io;
}
#endif
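The mov_buffer_t shim above plugs plain FILE* I/O into media-server's MP4 writer. Below is a minimal sketch of the writer lifecycle the way createFile()/inputH265_l()/asyncClose() use it further down; the file name and sample parameters are illustrative, and the signatures are assumed to match media-server's mov-writer.h/mov-format.h:

#include <cstdint>
#include <cstdio>
#include "mov-writer.h"
#include "mov-format.h"

// writes a single pre-converted HVCC-format sample into a new file,
// reusing the mov_file_buffer() shim defined above in this file
static void writeOneHevcSample(const void *hvcc, int hvccSize,
                               const void *sample, int sampleSize,
                               int width, int height, uint32_t stampMs, bool keyFrame) {
    FILE *fp = fopen("record.mp4", "wb+");
    mov_writer_t *mov = mov_writer_create(mov_file_buffer(), fp, 0 /*or MOV_FLAG_FASTSTART*/);
    int video = mov_writer_add_video(mov, MOV_OBJECT_HEVC, width, height, hvcc, hvccSize);
    mov_writer_write(mov, video, sample, sampleSize, stampMs, stampMs,
                     keyFrame ? MOV_AV_FLAG_KEYFREAME : 0);
    mov_writer_destroy(mov);   // finalizes the moov box
    fclose(fp);
}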
using namespace toolkit;
namespace mediakit {
@ -101,17 +149,46 @@ void Mp4Maker::inputH264(void *pData, uint32_t ui32Length, uint32_t ui32TimeStam
break;
}
}

void Mp4Maker::inputH265(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp){
#ifdef MP4_H265RECORD
    auto iType = H265_TYPE(((uint8_t*)pData)[4]);
    if (iType <= 19 ){
        if (_strLastVideo.size() && iType == 19){
            _strLastVideo.append((char *)pData,ui32Length);
            inputH265_l((char *) _strLastVideo.data(), _strLastVideo.size(), ui32TimeStamp);
            _strLastVideo = "";
            _ui32LastVideoTime = ui32TimeStamp;
        }else
            inputH265_l((char *) pData, ui32Length, ui32TimeStamp);
    }else{
        _strLastVideo.append((char *)pData,ui32Length);
        _ui32LastVideoTime = ui32TimeStamp;
    }
#endif
}

void Mp4Maker::inputAAC(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp){
#ifdef MP4_H265RECORD
    if (_h265Record){
        inputAAC_l((char *) pData, ui32Length, ui32TimeStamp);
    }else
#endif
    {
        if (_strLastAudio.size()) {
            int64_t iTimeInc = (int64_t)ui32TimeStamp - (int64_t)_ui32LastAudioTime;
            iTimeInc = MAX(0,MIN(iTimeInc,500));
            if(iTimeInc == 0 || iTimeInc == 500){
                WarnL << "abnormal time stamp increment:" << ui32TimeStamp << " " << _ui32LastAudioTime;
            }
            inputAAC_l((char *) _strLastAudio.data(), _strLastAudio.size(), iTimeInc);
        }
        _strLastAudio.assign((char *)pData, ui32Length);
        _ui32LastAudioTime = ui32TimeStamp;
    }
}
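A note on the NAL-type checks in inputH265(): the [4] index skips the 4-byte Annex-B start code, and the HEVC NAL header keeps the 6-bit nal_unit_type in bits 1..6 of that byte (19/20 = IDR, 32/33/34 = VPS/SPS/PPS). A minimal equivalent of what the H265_TYPE macro from Extension/H265.h is presumed to compute:

#include <cstdint>

// nal_unit_type occupies bits 1..6 of the first NAL byte
static inline int hevcNalType(uint8_t firstNalByte) {
    return (firstNalByte >> 1) & 0x3f;   // e.g. 19 == NAL_IDR_W_RADL, 32/33/34 == VPS/SPS/PPS
}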
void Mp4Maker::inputH264_l(void *pData, uint32_t ui32Length, uint32_t ui32Duration) {
@ -127,17 +204,79 @@ void Mp4Maker::inputH264_l(void *pData, uint32_t ui32Length, uint32_t ui32Durati
}
}

void Mp4Maker::inputH265_l(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp) {
    GET_CONFIG(uint32_t,recordSec,Record::kFileSecond);
#ifdef MP4_H265RECORD
    int32_t compositionTime;
    auto iType = H265_TYPE(((uint8_t*)pData)[4]);
    if( iType >= H265Frame::NAL_IDR_W_RADL && (_movH265info.pMov == NULL || _ticker.elapsedTime() > recordSec * 1000)){
        //start a new MP4 file at an I frame
        //create a new file if none has been created yet or the current one exceeds 10 minutes
        _h265Record = 1;
        createFile();
    }

    char *pNualData = (char *)pData;
    if (/*iType <= 31 && */_movH265info.pMov!=NULL){
        int vcl;
        //newer media-server releases expose this API as h265_annexbtomp4
        //int n = h265_annexbtomp4(&_movH265info.hevc, pData, ui32Length, _sBbuffer, sizeof(_sBbuffer), &vcl);
        int n = hevc_annexbtomp4(&_movH265info.hevc, pData, ui32Length, _sBbuffer, sizeof(_sBbuffer));
        if (_movH265info.videoTrack < 0){
            if (_movH265info.hevc.numOfArrays < 1){
                return; // waiting for vps/sps/pps
            }
            uint8_t sExtraData[64 * 1024];
            int extraDataSize = mpeg4_hevc_decoder_configuration_record_save(&_movH265info.hevc, sExtraData, sizeof(sExtraData));
            if (extraDataSize <= 0){
                // invalid HVCC
                return;
            }
            // TODO: waiting for key frame ???
            _movH265info.videoTrack = mov_writer_add_video(_movH265info.pMov, MOV_OBJECT_HEVC, _movH265info.width, _movH265info.height, sExtraData, extraDataSize);
            if (_movH265info.videoTrack < 0)
                return;
        }
        mov_writer_write(_movH265info.pMov,
                         _movH265info.videoTrack,
                         _sBbuffer,
                         n,
                         ui32TimeStamp,
                         ui32TimeStamp,
                         (iType >= 16 && iType <= 23) ? MOV_AV_FLAG_KEYFREAME : 0 );
        // mov_writer_write(_movH265info.pMov, _movH265info.videoTrack, _sBbuffer, n, ui32TimeStamp, ui32TimeStamp, 1 == vcl ? MOV_AV_FLAG_KEYFREAME : 0);
    }
#endif
}

void Mp4Maker::inputAAC_l(void *pData, uint32_t ui32Length, uint32_t ui32Duration) {
    GET_CONFIG(uint32_t,recordSec,Record::kFileSecond);
#ifdef MP4_H265RECORD
    if ( _h265Record )
    {
        if (!_haveVideo && (_movH265info.pMov == NULL || _ticker.elapsedTime() > recordSec * 1000)) {
            createFile();
        }
        if (-1 != _movH265info.audioTrack && _movH265info.pMov != NULL){
            mov_writer_write(_movH265info.pMov, _movH265info.audioTrack, (uint8_t*)pData, ui32Length, ui32Duration, ui32Duration, 0);
        }
    }else
#endif
    {
        if (!_haveVideo && (_hMp4 == MP4_INVALID_FILE_HANDLE || _ticker.elapsedTime() > recordSec * 1000)) {
            //start a new MP4 file at an I frame
            //create a new file if none has been created yet or the current one exceeds 10 minutes
            createFile();
        }
        if (_hAudio != MP4_INVALID_TRACK_ID) {
            auto duration = ui32Duration * _audioSampleRate /1000.0;
            MP4WriteSample(_hMp4, _hAudio, (uint8_t*)pData, ui32Length,duration,0,false);
        }
    }
}
@ -169,60 +308,120 @@ void Mp4Maker::createFile() {
#else
File::createfile_path(strFileTmp.data(), 0);
#endif

#ifdef MP4_H265RECORD
    if ( _h265Record ){
        memset(&_movH265info, 0, sizeof(_movH265info));
        _movH265info.videoTrack = -1;
        _movH265info.audioTrack = -1;
        _movH265info.width = 0;
        _movH265info.height = 0;
        _movH265info.ptr = NULL;
        _movH265info.pFile = fopen(strFileTmp.data(), "wb+");
        _movH265info.pMov = mov_writer_create(mov_file_buffer(), _movH265info.pFile, 0/*MOV_FLAG_FASTSTART*/);
    }else
#endif
    {
        _hMp4 = MP4Create(strFileTmp.data(),MP4_CREATE_64BIT_DATA);
        if (_hMp4 == MP4_INVALID_FILE_HANDLE) {
            WarnL << "failed to create MP4 file:" << strFileTmp;
            return;
        }
    }
    //MP4SetTimeScale(_hMp4, 90000);
    _strFileTmp = strFileTmp;
    _strFile = strFile;
    _ticker.resetTime();

    if ( _h265Record ){
        auto videoTrack = dynamic_pointer_cast<H265Track>(getTrack(TrackVideo));
#ifdef MP4_H265RECORD
        if(videoTrack){
            _movH265info.width = videoTrack->getVideoWidth();
            _movH265info.height = videoTrack->getVideoHeight();
        }
#endif
    }else {
        auto videoTrack = dynamic_pointer_cast<H264Track>(getTrack(TrackVideo));
        if(videoTrack){
            auto &sps = videoTrack->getSps();
            auto &pps = videoTrack->getPps();
            _hVideo = MP4AddH264VideoTrack(_hMp4,
                                           90000,
                                           MP4_INVALID_DURATION,
                                           videoTrack->getVideoWidth(),
                                           videoTrack->getVideoHeight(),
                                           sps[1],
                                           sps[2],
                                           sps[3],
                                           3);
            if(_hVideo != MP4_INVALID_TRACK_ID){
                MP4AddH264SequenceParameterSet(_hMp4, _hVideo, (uint8_t *)sps.data(), sps.size());
                MP4AddH264PictureParameterSet(_hMp4, _hVideo, (uint8_t *)pps.data(), pps.size());
            }else{
                WarnL << "failed to add video track:" << strFileTmp;
            }
        }
    }

    auto audioTrack = dynamic_pointer_cast<AACTrack>(getTrack(TrackAudio));
    if(audioTrack){
        _audioSampleRate = audioTrack->getAudioSampleRate();
        _audioChannel = audioTrack->getAudioChannel();
#ifdef MP4_H265RECORD
        uint8_t extra_data[64 * 1024];
        if ( _h265Record ){
            _movH265info.audioTrack = mov_writer_add_audio(_movH265info.pMov, MOV_OBJECT_AAC, _audioChannel, 16, _audioSampleRate, audioTrack->getAacCfg().data(), 2);
            if (-1 == _movH265info.audioTrack)
                WarnL << "failed to add audio track:" << strFileTmp;
        }else
#endif
        {
            _hAudio = MP4AddAudioTrack(_hMp4, _audioSampleRate, MP4_INVALID_DURATION, MP4_MPEG4_AUDIO_TYPE);
            if (_hAudio != MP4_INVALID_TRACK_ID) {
                auto &cfg = audioTrack->getAacCfg();
                MP4SetTrackESConfiguration(_hMp4, _hAudio,(uint8_t *)cfg.data(), cfg.size());
            }else{
                WarnL << "failed to add audio track:" << strFileTmp;
            }
        }
    }
}
void Mp4Maker::asyncClose() {
//  auto hMp4 = (_h265Record==0)?_hMp4:_movH265info.pMov;
    auto strFileTmp = _strFileTmp;
    auto strFile = _strFile;
    auto info = _info;

    int h265Record = _h265Record;
#ifdef MP4_H265RECORD
    FILE *pFile = (_h265Record)?_movH265info.pFile:NULL;
    void * hMp4 = (_h265Record)?(void*)_movH265info.pMov:(void*)_hMp4;
#else
    auto hMp4 = _hMp4;
    FILE *pFile = NULL;
#endif
    WorkThreadPool::Instance().getExecutor()->async([hMp4,strFileTmp,strFile,info,pFile,h265Record]() {
        //the recording duration is captured before MP4Close so that the time MP4Close takes is not counted
        const_cast<Mp4Info&>(info).ui64TimeLen = ::time(NULL) - info.ui64StartedTime;
        //MP4Close is very time-consuming, so run it on a background thread
#ifdef MP4_H265RECORD
        if (h265Record){
            mov_writer_destroy((mov_writer_t*)hMp4);
            fclose(pFile);
        }else
#endif
        {
            MP4Close(hMp4,MP4_CLOSE_DO_NOT_COMPUTE_BITRATE);
        }
        //rename the temporary file to its final name so the mp4 cannot be accessed before it is complete
        rename(strFileTmp.data(),strFile.data());
        //get the file size
@ -235,11 +434,20 @@ void Mp4Maker::asyncClose() {
}
void Mp4Maker::closeFile() {
#ifdef MP4_H265RECORD
    if (_h265Record){
        if (_movH265info.pMov != NULL) {
            asyncClose();
        }
    }else
#endif
    {
        if (_hMp4 != MP4_INVALID_FILE_HANDLE) {
            asyncClose();
            _hMp4 = MP4_INVALID_FILE_HANDLE;
            _hVideo = MP4_INVALID_TRACK_ID;
            _hAudio = MP4_INVALID_TRACK_ID;
        }
    }
}
@ -253,7 +461,10 @@ void Mp4Maker::onTrackFrame(const Frame::Ptr &frame) {
inputAAC(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize(),frame->stamp());
}
break;
case CodecH265:{
inputH265(frame->data() , frame->size(),frame->stamp());
}
break;
default:
break;
}

View File

@ -40,6 +40,12 @@
#include "Common/MediaSink.h"
#include "Extension/Track.h"
#ifdef MP4_H265RECORD
#include "mov-writer.h"
#include "mpeg4-hevc.h"
#endif
using namespace toolkit;
namespace mediakit {
@ -57,6 +63,22 @@ public:
string strStreamId;//stream ID
string strVhost;//vhost
};
class MovH265Info {
public:
#ifdef MP4_H265RECORD
mov_writer_t* pMov;
struct mpeg4_hevc_t hevc;
int videoTrack;
int audioTrack;
int width;
int height;
const uint8_t* ptr;
FILE * pFile;
#endif
};
class Mp4Maker : public MediaSink{
public:
typedef std::shared_ptr<Mp4Maker> Ptr;
@ -83,13 +105,21 @@ private:
//timestamp reference frequency: 1000
void inputH264(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
void inputH265(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
//timestamp reference frequency: 1000
void inputAAC(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
void inputH264_l(void *pData, uint32_t ui32Length, uint32_t ui64Duration);
void inputH265_l(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
void inputAAC_l(void *pData, uint32_t ui32Length, uint32_t ui64Duration);
private:
MovH265Info _movH265info;
int _h265Record = 0;
uint8_t _sBbuffer[2 * 1024 * 1024];
MP4FileHandle _hMp4 = MP4_INVALID_FILE_HANDLE;
MP4TrackId _hVideo = MP4_INVALID_TRACK_ID;
MP4TrackId _hAudio = MP4_INVALID_TRACK_ID;
string _strPath;
@ -106,6 +136,7 @@ private:
bool _haveVideo = false;
int _audioSampleRate;
int _audioChannel;
};
} /* namespace mediakit */

View File

@ -180,7 +180,7 @@ void initEventListener() {
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastMediaChanged, [](BroadcastMediaChangedArgs) {
if (schema == RTMP_SCHEMA && app == "live") {
lock_guard<mutex> lck(s_mtxFlvRecorder);
if (/*bRegist*/0) {
DebugL << "start recording RTMP" << schema << " " << vhost << " " << app << " " << stream;
GET_CONFIG(string, http_root, Http::kRootPath);
auto path =
@ -239,8 +239,9 @@ int main(int argc,char *argv[]) {
//pull-stream source URLs; rtmp/rtsp are supported, and the payload must be H264+AAC
//other, unrecognized audio/video will be ignored (e.g. with h264+adpcm the audio is dropped after forwarding)
auto urlList = {
// "rtsp://admin:admin123@192.168.5.82/",
"rtsp://192.168.5.24/live/chn0",
//rtsp URLs support embedding a username and password
/*"rtsp://admin:jzan123456@192.168.0.122/"*/};
map<string, PlayerProxy::Ptr> proxyMap;
@ -259,7 +260,7 @@ int main(int argc,char *argv[]) {
//rtsp://127.0.0.1/record/live/0/2017-04-11/11-09-38.mp4
//rtmp://127.0.0.1/record/live/0/2017-04-11/11-09-38.mp4
PlayerProxy::Ptr player(new PlayerProxy(DEFAULT_VHOST, "live", to_string(i).data(),true,true,true,true));
//use RTP over TCP (takes effect when playing rtsp)
(*player)[kRtpType] = Rtsp::RTP_TCP;
//start playback; if playback fails or stops, it retries automatically; the retry count is set in the config file and defaults to retrying forever
@ -276,7 +277,7 @@ int main(int argc,char *argv[]) {
" http-flv URL : http://127.0.0.1/live/0.flv\n"
" rtsp URL : rtsp://127.0.0.1/live/0\n"
" rtmp URL : rtmp://127.0.0.1/live/0";
#if 1
//load the certificate, which contains both the public and the private key
SSL_Initor::Instance().loadCertificate((exeDir() + "ssl.p12").data());
//trust a particular self-signed certificate
@ -293,12 +294,12 @@ int main(int argc,char *argv[]) {
//a simple telnet server, handy for debugging; do not use port 23, otherwise telnet behaves strangely
//test with: telnet 127.0.0.1 9000
// TcpServer::Ptr shellSrv(new TcpServer());
TcpServer::Ptr rtspSrv(new TcpServer());
TcpServer::Ptr rtmpSrv(new TcpServer());
TcpServer::Ptr httpSrv(new TcpServer());
// shellSrv->start<ShellSession>(shellPort);
rtspSrv->start<RtspSession>(rtspPort);//default 554
rtmpSrv->start<RtmpSession>(rtmpPort);//default 1935
//http server with websocket support
@ -316,11 +317,13 @@ int main(int argc,char *argv[]) {
//the server supports switching ports at runtime (existing connections are not affected)
NoticeCenter::Instance().addListener(ReloadConfigTag,Broadcast::kBroadcastReloadConfig,[&](BroadcastReloadConfigArgs){
//recreate the servers
#if 0
if(shellPort != mINI::Instance()[Shell::kPort].as<uint16_t>()){
shellPort = mINI::Instance()[Shell::kPort];
shellSrv->start<ShellSession>(shellPort);
InfoL << "restart shell server:" << shellPort;
}
#endif
if(rtspPort != mINI::Instance()[Rtsp::kPort].as<uint16_t>()){
rtspPort = mINI::Instance()[Rtsp::kPort];
rtspSrv->start<RtspSession>(rtspPort);
@ -331,6 +334,7 @@ int main(int argc,char *argv[]) {
rtmpSrv->start<RtmpSession>(rtmpPort);
InfoL << "restart rtmp server" << rtmpPort;
}
#if 1
if(httpPort != mINI::Instance()[Http::kPort].as<uint16_t>()){
httpPort = mINI::Instance()[Http::kPort];
httpSrv->start<EchoWebSocketSession>(httpPort);
@ -341,6 +345,7 @@ int main(int argc,char *argv[]) {
httpsSrv->start<SSLEchoWebSocketSession>(httpsPort);
InfoL << "restart https server" << httpsPort;
}
#endif
if(rtspsPort != mINI::Instance()[Rtsp::kSSLPort].as<uint16_t>()){
rtspsPort = mINI::Instance()[Rtsp::kSSLPort];
@ -348,7 +353,7 @@ int main(int argc,char *argv[]) {
InfoL << "restart rtsps server" << rtspsPort;
}
});
#endif
//install the exit-signal handler
static semaphore sem;
signal(SIGINT, [](int) { sem.post(); });// set the exit signal