Merge pull request #4 from zlmediakit/master

update
baiyfcu 2019-08-12 18:25:04 +08:00 committed by GitHub
commit d3a4fc1f4a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
66 changed files with 2516 additions and 356 deletions

@ -1 +1 @@
Subproject commit 936d3c05b183cba279bb348f8eac9eca0cc810c2
Subproject commit 2bb234006c852b1d1a61a0e9a7f39dde7105fe34

@ -1 +1 @@
Subproject commit e399b93802610dcf574ff64bcb7677572cd028c1
Subproject commit 6df71e01c174cdfe69e597cc4acb766a20b28620

View File

@ -1,4 +1,4 @@
#include <jni.h>
#include <jni.h>
#include <string>
#include "test_server.cpp"

View File

@ -36,6 +36,7 @@ set(ENABLE_MYSQL true)
set(ENABLE_MP4V2 true)
set(ENABLE_FAAC true)
set(ENABLE_X264 true)
set(ENABLE_MP4RECORD true)
#
if(ENABLE_HLS)
@ -47,6 +48,12 @@ else()
set(LINK_LIB_LIST zlmediakit zltoolkit)
endif()
if(ENABLE_MP4RECORD)
message(STATUS "ENABLE_MP4RECORD defined")
add_definitions(-DENABLE_MP4RECORD)
set(MediaServer_Root ${CMAKE_SOURCE_DIR}/3rdpart/media-server)
list(APPEND LINK_LIB_LIST mov flv)
endif()
#openssl
find_package(OpenSSL QUIET)
if (OPENSSL_FOUND AND ENABLE_OPENSSL)
@ -111,6 +118,21 @@ if(ENABLE_HLS)
endif(WIN32)
endif()
if(ENABLE_MP4RECORD)
aux_source_directory(${MediaServer_Root}/libmov/include src_mov)
aux_source_directory(${MediaServer_Root}/libmov/source src_mov)
include_directories(${MediaServer_Root}/libmov/include)
aux_source_directory(${MediaServer_Root}/libflv/include src_flv)
aux_source_directory(${MediaServer_Root}/libflv/source src_flv)
include_directories(${MediaServer_Root}/libflv/include)
add_library(mov STATIC ${src_mov})
add_library(flv STATIC ${src_flv})
if(WIN32)
set_target_properties(mov flv PROPERTIES COMPILE_FLAGS ${VS_FALGS} )
endif(WIN32)
endif()
if (WIN32)
list(APPEND LINK_LIB_LIST WS2_32 Iphlpapi shlwapi)
set_target_properties(zltoolkit PROPERTIES COMPILE_FLAGS ${VS_FALGS} )

View File

@ -63,7 +63,7 @@
| RTMP --> RTSP[S] | Y | N | Y | N |
| RTSP[S] --> HLS | Y | Y | Y | N |
| RTMP --> HLS | Y | N | Y | N |
| RTSP[S] --> MP4 | Y | N | Y | N |
| RTSP[S] --> MP4 | Y | Y | Y | N |
| RTMP --> MP4 | Y | N | Y | N |
| MP4 --> RTSP[S] | Y | N | Y | N |
| MP4 --> RTMP | Y | N | Y | N |
@ -73,9 +73,9 @@
| feature/codec | H264 | H265 | AAC | other |
| :-----------: | :--: | :--: | :--: | :---: |
| RTSP[S] push | Y | Y | Y | Y |
| RTSP proxy | Y | Y | Y | N |
| RTSP proxy | Y | Y | Y | Y |
| RTMP push | Y | Y | Y | Y |
| RTMP proxy | Y | N | Y | N |
| RTMP proxy | Y | Y | Y | Y |
- RTP transport:

View File

@ -78,7 +78,7 @@
| RTMP --> RTSP[S] | Y | N | Y | N |
| RTSP[S] --> HLS | Y | Y | Y | N |
| RTMP --> HLS | Y | N | Y | N |
| RTSP[S] --> MP4 | Y | N | Y | N |
| RTSP[S] --> MP4 | Y | Y | Y | N |
| RTMP --> MP4 | Y | N | Y | N |
| MP4 --> RTSP[S] | Y | N | Y | N |
| MP4 --> RTMP | Y | N | Y | N |
@ -88,9 +88,9 @@
| 功能/编码格式 | H264 | H265 | AAC | other |
| :------------------------------: | :--: | :--: | :--: | :---: |
| RTSP[S]推流 | Y | Y | Y | Y |
| RTSP拉流代理 | Y | Y | Y | N |
| RTSP拉流代理 | Y | Y | Y | Y |
| RTMP推流 | Y | Y | Y | Y |
| RTMP拉流代理 | Y | N | Y | N |
| RTMP拉流代理 | Y | Y | Y | Y |
- RTP传输方式:

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -127,6 +127,14 @@ public:
"日志等级,LTrace~LError(0~4)",/*该选项说明文字*/
nullptr);
(*_parser) << Option('m',/*该选项简称,如果是\x00则说明无简称*/
"max_day",/*该选项全称,每个选项必须有全称不得为null或空字符串*/
Option::ArgRequired,/*该选项后面必须跟值*/
"7",/*该选项默认值*/
false,/*该选项是否必须赋值如果没有默认值且为ArgRequired时用户必须提供该参数否则将抛异常*/
"日志最多保存天数",/*该选项说明文字*/
nullptr);
(*_parser) << Option('c',/*该选项简称,如果是\x00则说明无简称*/
"config",/*该选项全称,每个选项必须有全称不得为null或空字符串*/
Option::ArgRequired,/*该选项后面必须跟值*/
@ -216,11 +224,10 @@ int main(int argc,char *argv[]) {
//set up logging
Logger::Instance().add(std::make_shared<ConsoleChannel>("ConsoleChannel", logLevel));
#if defined(__linux__) || defined(__linux)
Logger::Instance().add(std::make_shared<SysLogChannel>("SysLogChannel",logLevel));
#else
Logger::Instance().add(std::make_shared<FileChannel>("FileChannel", exePath() + ".log", logLevel));
#endif
auto fileChannel = std::make_shared<FileChannel>("FileChannel", exeDir() + "log/", logLevel);
//maximum number of days to keep log files
fileChannel->setMaxDay(cmd_main["max_day"]);
Logger::Instance().add(fileChannel);
#if !defined(_WIN32)
if (bDaemon) {

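For reference, a minimal standalone sketch of the new logging setup (the directory and the 3-day value are only examples; MediaServer takes the value from the new -m/--max_day option):

#include <memory>
#include "Util/logger.h"
#include "Util/util.h"
using namespace toolkit;

int main() {
    // console output for interactive runs
    Logger::Instance().add(std::make_shared<ConsoleChannel>("ConsoleChannel", LTrace));
    // rolling file logs under <exe dir>/log/, pruned after 3 days
    auto fileChannel = std::make_shared<FileChannel>("FileChannel", exeDir() + "log/", LTrace);
    fileChannel->setMaxDay(3); // example value only
    Logger::Instance().add(fileChannel);
    InfoL << "logging initialized";
    return 0;
}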
View File

@ -41,7 +41,7 @@ void MediaSink::addTrack(const Track::Ptr &track_in) {
if(!strongSelf){
return;
}
if(strongSelf->_allTrackReady){
if(!strongSelf->_anyTrackUnReady){
strongSelf->onTrackFrame(frame);
}
}));
@ -53,6 +53,7 @@ void MediaSink::addTrack(const Track::Ptr &track_in) {
if(track->ready()){
lam();
}else{
_anyTrackUnReady = true;
_allTrackReady = false;
_trackReadyCallback[codec_id] = lam;
_ticker.resetTime();
@ -79,7 +80,7 @@ void MediaSink::inputFrame(const Frame::Ptr &frame) {
if(!_allTrackReady && (_trackReadyCallback.empty() || _ticker.elapsedTime() > MAX_WAIT_MS)){
_allTrackReady = true;
_anyTrackUnReady = false;
if(!_trackReadyCallback.empty()){
//timeout: forcibly ignore tracks that are still not ready
_trackReadyCallback.clear();

View File

@ -99,6 +99,7 @@ private:
map<int,Track::Ptr> _track_map;
map<int,function<void()> > _trackReadyCallback;
bool _allTrackReady = false;
bool _anyTrackUnReady = false;
Ticker _ticker;
};
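For context, the gating above is what downstream recorders rely on; a sketch of a hypothetical MediaSink consumer (MP4Muxer later in this PR uses the same two hooks):

#include <cstdint>
#include "Common/MediaSink.h"
#include "Util/logger.h"

// hypothetical sink: counts frames once every added track has become ready
class CountingSink : public mediakit::MediaSink {
protected:
    void onTrackReady(const mediakit::Track::Ptr &track) override {
        InfoL << "track ready, codec id = " << track->getCodecId();
    }
    void onTrackFrame(const mediakit::Frame::Ptr &frame) override {
        // only reached while no track is still un-ready (or after the MAX_WAIT_MS timeout forced readiness)
        ++_frames;
    }
private:
    uint64_t _frames = 0;
};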

View File

@ -1,4 +1,4 @@
//
//
// Created by xzl on 2019/6/28.
//

View File

@ -1,4 +1,4 @@
//
//
// Created by xzl on 2019/6/28.
//

View File

@ -260,11 +260,15 @@ const string kFileSecond = RECORD_FIELD"fileSecond";
#define RECORD_FILE_PATH HTTP_ROOT_PATH
const string kFilePath = RECORD_FIELD"filePath";
//write buffer size for mp4 files
const string kFileBufSize = RECORD_FIELD"fileBufSize";
onceToken token([](){
mINI::Instance()[kAppName] = RECORD_APP_NAME;
mINI::Instance()[kSampleMS] = RECORD_SAMPLE_MS;
mINI::Instance()[kFileSecond] = RECORD_FILE_SECOND;
mINI::Instance()[kFilePath] = RECORD_FILE_PATH;
mINI::Instance()[kFileBufSize] = 64 * 1024;
},nullptr);
} //namespace Record
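The new key is consumed via GET_CONFIG; a minimal sketch of that pattern (mirroring how MP4MuxerFile, added later in this PR, sizes its stdio buffer; the helper name is illustrative):

#include <cstdio>
#include <memory>
#include "Common/config.h"
using namespace mediakit;

// the returned buffer must stay alive for as long as fp is used
static std::shared_ptr<char> applyRecordIoBuffer(FILE *fp) {
    GET_CONFIG(uint32_t, mp4BufSize, Record::kFileBufSize); // 64 * 1024 unless overridden in the config file
    std::shared_ptr<char> file_buf(new char[mp4BufSize], [](char *p) { delete[] p; });
    setvbuf(fp, file_buf.get(), _IOFBF, mp4BufSize);         // enlarge stdio's write buffer for the mp4
    return file_buf;
}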

View File

@ -255,6 +255,8 @@ extern const string kSampleMS;
extern const string kFileSecond;
//recording file path
extern const string kFilePath;
//write buffer size for mp4 files
extern const string kFileBufSize;
} //namespace Record
////////////HLS related config///////////

View File

@ -79,6 +79,10 @@ public:
bool keyFrame() const override {
return false;
}
bool configFrame() const override{
return false;
}
public:
unsigned int syncword = 0; //12 bslbf: sync word; the bit string '1111 1111 1111' marks the start of an ADTS frame
unsigned int id; //1 bslbf: MPEG identifier, set to 1
@ -127,6 +131,10 @@ public:
bool keyFrame() const override {
return false;
}
bool configFrame() const override{
return false;
}
} ;

View File

@ -36,10 +36,10 @@ namespace mediakit{
Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) {
if (strcasecmp(track->_codec.data(), "mpeg4-generic") == 0) {
string aac_cfg_str = FindField(track->_fmtp.data(), "config=", nullptr);
if (aac_cfg_str.size() != 4) {
if (aac_cfg_str.empty()) {
aac_cfg_str = FindField(track->_fmtp.data(), "config=", ";");
}
if (aac_cfg_str.size() != 4) {
if (aac_cfg_str.empty()) {
//defer obtaining the adts header
return std::make_shared<AACTrack>();
}
@ -76,8 +76,14 @@ Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) {
if (strcasecmp(track->_codec.data(), "h265") == 0) {
//a=fmtp:96 sprop-sps=QgEBAWAAAAMAsAAAAwAAAwBdoAKAgC0WNrkky/AIAAADAAgAAAMBlQg=; sprop-pps=RAHA8vA8kAA=
int pt;
int pt, id;
char sprop_vps[128] = {0},sprop_sps[128] = {0},sprop_pps[128] = {0};
if (5 == sscanf(track->_fmtp.data(), "%d profile-id=%d; sprop-sps=%127[^;]; sprop-pps=%127[^;]; sprop-vps=%127[^;]", &pt, &id, sprop_sps,sprop_pps, sprop_vps)) {
auto vps = decodeBase64(sprop_vps);
auto sps = decodeBase64(sprop_sps);
auto pps = decodeBase64(sprop_pps);
return std::make_shared<H265Track>(vps,sps,pps,0,0,0);
}
if (4 == sscanf(track->_fmtp.data(), "%d sprop-vps=%127[^;]; sprop-sps=%127[^;]; sprop-pps=%127[^;]", &pt, sprop_vps,sprop_sps, sprop_pps)) {
auto vps = decodeBase64(sprop_vps);
auto sps = decodeBase64(sprop_sps);

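For reference, a standalone check of the two fmtp layouts accepted above (the base64 payloads are truncated placeholders, not real parameter sets):

#include <cstdio>

int main() {
    // layout 1 (new branch): profile-id first, then sps/pps/vps
    const char *fmtp1 = "96 profile-id=1; sprop-sps=QgEB...; sprop-pps=RAHA...; sprop-vps=QAEM...";
    // layout 2 (existing branch): vps/sps/pps
    const char *fmtp2 = "96 sprop-vps=QAEM...; sprop-sps=QgEB...; sprop-pps=RAHA...";
    int pt, id;
    char vps[128] = {0}, sps[128] = {0}, pps[128] = {0};
    if (5 == sscanf(fmtp1, "%d profile-id=%d; sprop-sps=%127[^;]; sprop-pps=%127[^;]; sprop-vps=%127[^;]",
                    &pt, &id, sps, pps, vps)) {
        printf("matched layout 1: pt=%d profile-id=%d\n", pt, id);
    }
    if (4 == sscanf(fmtp2, "%d sprop-vps=%127[^;]; sprop-sps=%127[^;]; sprop-pps=%127[^;]",
                    &pt, vps, sps, pps)) {
        printf("matched layout 2: pt=%d\n", pt);
    }
    return 0;
}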
View File

@ -117,6 +117,12 @@ public:
*/
virtual bool keyFrame() const = 0;
/**
* whether this is a config frame (sps/pps/vps)
* @return
*/
virtual bool configFrame() const = 0;
/**
*
*/
@ -371,6 +377,7 @@ public:
_trackType = frame->getTrackType();
_codec = frame->getCodecId();
_key = frame->keyFrame();
_config = frame->configFrame();
}
virtual ~FrameCacheAble() = default;
@ -394,12 +401,17 @@ public:
bool keyFrame() const override{
return _key;
}
bool configFrame() const override{
return _config;
}
private:
Frame::Ptr _frame;
BufferRaw::Ptr _buffer;
TrackType _trackType;
CodecId _codec;
bool _key;
bool _config;
};
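A sketch of how the new predicate is typically consumed (a hypothetical helper mirroring what MP4Muxer::onTrackFrame does later in this PR):

// decide whether a frame should be written as an mp4 sample
static bool shouldWriteSample(const mediakit::Frame::Ptr &frame, bool started) {
    if (frame->configFrame()) {
        // sps/pps/vps travel in the track extradata, not as samples
        return false;
    }
    if (!started) {
        // only start on a video key frame so the file begins decodable
        return frame->getTrackType() == mediakit::TrackVideo && frame->keyFrame();
    }
    return true;
}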

View File

@ -32,9 +32,6 @@ using namespace toolkit;
namespace mediakit{
bool getAVCInfo(const string& strSps,int &iVideoWidth, int &iVideoHeight, float &iVideoFps) {
return getAVCInfo(strSps.data(),strSps.size(),iVideoWidth,iVideoHeight,iVideoFps);
}
bool getAVCInfo(const char * sps,int sps_len,int &iVideoWidth, int &iVideoHeight, float &iVideoFps){
T_GetBitContext tGetBitBuf;
T_SPS tH264SpsInfo;
@ -51,6 +48,9 @@ bool getAVCInfo(const char * sps,int sps_len,int &iVideoWidth, int &iVideoHeight
return true;
}
bool getAVCInfo(const string& strSps,int &iVideoWidth, int &iVideoHeight, float &iVideoFps) {
return getAVCInfo(strSps.data(),strSps.size(),iVideoWidth,iVideoHeight,iVideoFps);
}
const char *memfind(const char *buf, int len, const char *subbuf, int sublen) {
for (auto i = 0; i < len - sublen; ++i) {

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
@ -36,7 +36,6 @@ using namespace toolkit;
namespace mediakit{
bool getAVCInfo(const string &strSps,int &iVideoWidth, int &iVideoHeight, float &iVideoFps);
bool getAVCInfo(const char * sps,int sps_len,int &iVideoWidth, int &iVideoHeight, float &iVideoFps);
void splitH264(const char *ptr, int len, const std::function<void(const char *, int)> &cb);
/**
@ -80,13 +79,21 @@ public:
}
bool keyFrame() const override {
return type == NAL_IDR;
return H264_TYPE(buffer[iPrefixSize]) == H264Frame::NAL_IDR;
}
bool configFrame() const override{
switch(H264_TYPE(buffer[iPrefixSize]) ){
case H264Frame::NAL_SPS:
case H264Frame::NAL_PPS:
return true;
default:
return false;
}
}
public:
uint16_t sequence;
uint32_t timeStamp;
uint32_t ptsStamp = 0;
unsigned char type;
string buffer;
uint32_t iPrefixSize = 4;
};
@ -120,6 +127,16 @@ public:
bool keyFrame() const override {
return H264_TYPE(_ptr[_prefixSize]) == H264Frame::NAL_IDR;
}
bool configFrame() const override{
switch(H264_TYPE(_ptr[_prefixSize])){
case H264Frame::NAL_SPS:
case H264Frame::NAL_PPS:
return true;
default:
return false;
}
}
};
/**
@ -326,7 +343,6 @@ private:
if(!_sps.empty()){
auto spsFrame = std::make_shared<H264Frame>();
spsFrame->type = H264Frame::NAL_SPS;
spsFrame->iPrefixSize = 4;
spsFrame->buffer.assign("\x0\x0\x0\x1",4);
spsFrame->buffer.append(_sps);
@ -336,7 +352,6 @@ private:
if(!_pps.empty()){
auto ppsFrame = std::make_shared<H264Frame>();
ppsFrame->type = H264Frame::NAL_PPS;
ppsFrame->iPrefixSize = 4;
ppsFrame->buffer.assign("\x0\x0\x0\x1",4);
ppsFrame->buffer.append(_pps);

View File

@ -80,7 +80,6 @@ bool H264RtmpDecoder::decodeRtmp(const RtmpPacket::Ptr &pkt) {
inline void H264RtmpDecoder::onGetH264(const char* pcData, int iLen, uint32_t dts,uint32_t pts) {
#if 1
_h264frame->type = H264_TYPE(pcData[0]);
_h264frame->timeStamp = dts;
_h264frame->ptsStamp = pts;
_h264frame->buffer.assign("\x0\x0\x0\x1", 4); //prepend the h264 start code

View File

@ -100,12 +100,10 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
//a full frame
_h264frame->buffer.assign("\x0\x0\x0\x1", 4);
_h264frame->buffer.append((char *)frame, length);
_h264frame->type = nal.type;
_h264frame->timeStamp = rtppack->timeStamp;
_h264frame->sequence = rtppack->sequence;
auto isIDR = _h264frame->type == H264Frame::NAL_IDR;
auto key = _h264frame->keyFrame();
onGetH264(_h264frame);
return (isIDR); //i frame
return (key); //i frame
}
switch (nal.type){
@ -131,9 +129,7 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
MakeNalu(ptr[0], nal);
_h264frame->buffer.assign("\x0\x0\x0\x1", 4);
_h264frame->buffer.append((char *)ptr, len);
_h264frame->type = nal.type;
_h264frame->timeStamp = rtppack->timeStamp;
_h264frame->sequence = rtppack->sequence;
if(nal.type == H264Frame::NAL_IDR){
haveIDR = true;
}
@ -148,35 +144,39 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
//FU-A
FU fu;
MakeFU(frame[1], fu);
if (fu.S == 1) {
//FU-A start
if (fu.S) {
//first rtp packet of this frame
char tmp = (nal.forbidden_zero_bit << 7 | nal.nal_ref_idc << 5 | fu.type);
_h264frame->buffer.assign("\x0\x0\x0\x1", 4);
_h264frame->buffer.push_back(tmp);
_h264frame->buffer.append((char *)frame + 2, length - 2);
_h264frame->type = fu.type;
_h264frame->timeStamp = rtppack->timeStamp;
_h264frame->sequence = rtppack->sequence;
return (_h264frame->type == H264Frame::NAL_IDR); //i frame
//save the current sequence before returning so the next packet can be checked for seq continuity
_lastSeq = rtppack->sequence;
return _h264frame->keyFrame();
}
if (rtppack->sequence != (uint16_t)(_h264frame->sequence + 1)) {
if (rtppack->sequence != _lastSeq + 1 && rtppack->sequence != 0) {
//middle and trailing rtp packets must have consecutive seq numbers (a wrap-around counts as consecutive); otherwise rtp packets were lost, the frame is incomplete and must be discarded
_h264frame->buffer.clear();
WarnL << "丢包,帧废弃:" << rtppack->sequence << "," << _h264frame->sequence;
WarnL << "rtp sequence不连续: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃";
return false;
}
_h264frame->sequence = rtppack->sequence;
if (fu.E == 1) {
//FU-A end
if (!fu.E) {
//a middle rtp packet of this frame
_h264frame->buffer.append((char *)frame + 2, length - 2);
_h264frame->timeStamp = rtppack->timeStamp;
auto isIDR = _h264frame->type == H264Frame::NAL_IDR;
onGetH264(_h264frame);
return isIDR;
//save the current sequence before returning so the next packet can be checked for seq continuity
_lastSeq = rtppack->sequence;
return false;
}
//FU-A mid
//the last rtp packet of this frame
_h264frame->buffer.append((char *)frame + 2, length - 2);
return false;
_h264frame->timeStamp = rtppack->timeStamp;
auto key = _h264frame->keyFrame();
onGetH264(_h264frame);
return key;
}
default:{
@ -195,10 +195,8 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
void H264RtpDecoder::onGetH264(const H264Frame::Ptr &frame) {
//write into the ring buffer
auto lastSeq = _h264frame->sequence;
RtpCodec::inputFrame(frame);
_h264frame = obtainFrame();
_h264frame->sequence = lastSeq;
}
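For reference, the FU-A start/middle/end decisions above hinge on two header bytes; a standalone bit-level view of them (RFC 6184 layout; the example bytes are made up):

#include <cstdint>
#include <cstdio>

int main() {
    // first two bytes of an FU-A payload: FU indicator + FU header
    uint8_t payload[] = {0x7C, 0x85}; // NRI=3, type=28 (FU-A); S=1, E=0, original nal type=5 (IDR)
    int nri   = (payload[0] >> 5) & 0x03;
    int start = (payload[1] >> 7) & 0x01;  // S bit: first packet of the fragmented NALU
    int end   = (payload[1] >> 6) & 0x01;  // E bit: last packet of the fragmented NALU
    int type  =  payload[1] & 0x1F;        // original NALU type (5 == IDR)
    // on the S packet the decoder rebuilds the NALU header exactly like the code above:
    uint8_t nalu_header = (uint8_t)((payload[0] & 0x80) | (nri << 5) | type); // 0x65 here
    printf("S=%d E=%d type=%d rebuilt header=0x%02X\n", start, end, type, nalu_header);
    return 0;
}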

View File

@ -64,6 +64,7 @@ private:
H264Frame::Ptr obtainFrame();
private:
H264Frame::Ptr _h264frame;
int _lastSeq = 0;
};
/**

View File

@ -25,9 +25,47 @@
*/
#include "H265.h"
#include "SPSParser.h"
#include "Util/logger.h"
namespace mediakit{
bool getHEVCInfo(const char * vps, int vps_len,const char * sps,int sps_len,int &iVideoWidth, int &iVideoHeight, float &iVideoFps){
T_GetBitContext tGetBitBuf;
T_HEVCSPS tH265SpsInfo;
T_HEVCVPS tH265VpsInfo;
if ( vps_len > 2 ){
memset(&tGetBitBuf,0,sizeof(tGetBitBuf));
memset(&tH265VpsInfo,0,sizeof(tH265VpsInfo));
tGetBitBuf.pu8Buf = (uint8_t*)vps+2;
tGetBitBuf.iBufSize = vps_len-2;
if(0 != h265DecVideoParameterSet((void *) &tGetBitBuf, &tH265VpsInfo)){
return false;
}
}
if ( sps_len > 2 ){
memset(&tGetBitBuf,0,sizeof(tGetBitBuf));
memset(&tH265SpsInfo,0,sizeof(tH265SpsInfo));
tGetBitBuf.pu8Buf = (uint8_t*)sps+2;
tGetBitBuf.iBufSize = sps_len-2;
if(0 != h265DecSeqParameterSet((void *) &tGetBitBuf, &tH265SpsInfo)){
return false;
}
}
else
return false;
h265GetWidthHeight(&tH265SpsInfo, &iVideoWidth, &iVideoHeight);
iVideoFps = 0;
h265GeFramerate(&tH265VpsInfo, &tH265SpsInfo, &iVideoFps);
// ErrorL << iVideoWidth << " " << iVideoHeight << " " << iVideoFps;
return true;
}
bool getHEVCInfo(const string &strVps, const string &strSps, int &iVideoWidth, int &iVideoHeight, float &iVideoFps) {
return getHEVCInfo(strVps.data(),strVps.size(),strSps.data(),strSps.size(),iVideoWidth,iVideoHeight,iVideoFps);
}
Sdp::Ptr H265Track::getSdp() {
if(!ready()){
WarnL << "H265 Track未准备好";

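A usage sketch for the new helper (the vps/sps arguments are placeholders; real ones come from the SDP or the stream, without the 00 00 00 01 start code):

#include <string>
#include "Util/logger.h"
#include "Extension/H265.h"

void probeHevc(const std::string &vps, const std::string &sps) {
    int width = 0, height = 0;
    float fps = 0;
    if (mediakit::getHEVCInfo(vps, sps, width, height, fps)) {
        InfoL << "hevc " << width << "x" << height << " @" << fps << "fps";
    } else {
        WarnL << "failed to parse vps/sps";
    }
}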
View File

@ -36,6 +36,8 @@ using namespace toolkit;
namespace mediakit {
bool getHEVCInfo(const string &strVps, const string &strSps, int &iVideoWidth, int &iVideoHeight, float &iVideoFps);
/**
* H265 frame class
*/
@ -83,6 +85,10 @@ public:
return timeStamp;
}
uint32_t pts() const override {
return ptsStamp ? ptsStamp : timeStamp;
}
uint32_t prefixSize() const override {
return iPrefixSize;
}
@ -96,7 +102,18 @@ public:
}
bool keyFrame() const override {
return isKeyFrame(type);
return isKeyFrame(H265_TYPE(buffer[iPrefixSize]));
}
bool configFrame() const override{
switch(H265_TYPE(buffer[iPrefixSize])){
case H265Frame::NAL_VPS:
case H265Frame::NAL_SPS:
case H265Frame::NAL_PPS:
return true;
default:
return false;
}
}
static bool isKeyFrame(int type) {
@ -114,9 +131,8 @@ public:
}
public:
uint16_t sequence;
uint32_t timeStamp;
unsigned char type;
uint32_t ptsStamp = 0;
string buffer;
uint32_t iPrefixSize = 4;
};
@ -143,8 +159,18 @@ public:
}
bool keyFrame() const override {
int type = H265_TYPE(((uint8_t *) _ptr)[_prefixSize]);
return H265Frame::isKeyFrame(type);
return H265Frame::isKeyFrame(H265_TYPE(((uint8_t *) _ptr)[_prefixSize]));
}
bool configFrame() const override{
switch(H265_TYPE(((uint8_t *) _ptr)[_prefixSize])){
case H265Frame::NAL_VPS:
case H265Frame::NAL_SPS:
case H265Frame::NAL_PPS:
return true;
default:
return false;
}
}
};
@ -176,6 +202,7 @@ public:
_vps = vps.substr(vps_prefix_len);
_sps = sps.substr(sps_prefix_len);
_pps = pps.substr(pps_prefix_len);
onReady();
}
/**
@ -206,6 +233,30 @@ public:
return CodecH265;
}
/**
* video height
* @return
*/
int getVideoHeight() const override{
return _height ;
}
/**
* video width
* @return
*/
int getVideoWidth() const override{
return _width;
}
/**
* video fps
* @return
*/
float getVideoFps() const override{
return _fps;
}
bool ready() override {
return !_vps.empty() && !_sps.empty() && !_pps.empty();
}
@ -280,6 +331,12 @@ private:
}
}
/**
* parse the sps to obtain width, height and fps
*/
void onReady(){
getHEVCInfo(_vps, _sps, _width, _height, _fps);
}
Track::Ptr clone() override {
return std::make_shared<std::remove_reference<decltype(*this)>::type>(*this);
}
@ -294,7 +351,6 @@ private:
}
if(!_vps.empty()){
auto vpsFrame = std::make_shared<H265Frame>();
vpsFrame->type = H265Frame::NAL_VPS;
vpsFrame->iPrefixSize = 4;
vpsFrame->buffer.assign("\x0\x0\x0\x1", 4);
vpsFrame->buffer.append(_vps);
@ -303,7 +359,6 @@ private:
}
if (!_sps.empty()) {
auto spsFrame = std::make_shared<H265Frame>();
spsFrame->type = H265Frame::NAL_SPS;
spsFrame->iPrefixSize = 4;
spsFrame->buffer.assign("\x0\x0\x0\x1", 4);
spsFrame->buffer.append(_sps);
@ -313,7 +368,6 @@ private:
if (!_pps.empty()) {
auto ppsFrame = std::make_shared<H265Frame>();
ppsFrame->type = H265Frame::NAL_PPS;
ppsFrame->iPrefixSize = 4;
ppsFrame->buffer.assign("\x0\x0\x0\x1", 4);
ppsFrame->buffer.append(_pps);
@ -325,6 +379,9 @@ private:
string _vps;
string _sps;
string _pps;
int _width = 0;
int _height = 0;
float _fps = 0;
bool _last_frame_is_idr = false;
};

View File

@ -99,56 +99,56 @@ bool H265RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
// fragmentation unit (FU)
FU fu;
MakeFU(frame[2], fu);
if (fu.S == 1) {
//FU-A start
if (fu.S) {
//first rtp packet of this frame
_h265frame->buffer.assign("\x0\x0\x0\x1", 4);
_h265frame->buffer.push_back(fu.type << 1);
_h265frame->buffer.push_back(0x01);
_h265frame->buffer.append((char *) frame + 3, length - 3);
_h265frame->type = fu.type;
_h265frame->timeStamp = rtppack->timeStamp;
_h265frame->sequence = rtppack->sequence;
//save the current sequence before returning so the next packet can be checked for seq continuity
_lastSeq = rtppack->sequence;
return (_h265frame->keyFrame()); //i frame
}
if (rtppack->sequence != (uint16_t) (_h265frame->sequence + 1)) {
if (rtppack->sequence != _lastSeq + 1 && rtppack->sequence != 0) {
//middle and trailing rtp packets must have consecutive seq numbers (a wrap-around counts as consecutive); otherwise rtp packets were lost, the frame is incomplete and must be discarded
_h265frame->buffer.clear();
WarnL << "丢包,帧废弃:" << rtppack->sequence << "," << _h265frame->sequence;
WarnL << "rtp sequence不连续: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃";
return false;
}
_h265frame->sequence = rtppack->sequence;
if (fu.E == 1) {
//FU-A end
if (!fu.E) {
//a middle rtp packet of this frame
_h265frame->buffer.append((char *) frame + 3, length - 3);
_h265frame->timeStamp = rtppack->timeStamp;
auto isIDR = _h265frame->keyFrame();
onGetH265(_h265frame);
return isIDR;
//save the current sequence before returning so the next packet can be checked for seq continuity
_lastSeq = rtppack->sequence;
return false;
}
//FU-A mid
//the last rtp packet of this frame
_h265frame->buffer.append((char *) frame + 3, length - 3);
return false;
_h265frame->timeStamp = rtppack->timeStamp;
auto key = _h265frame->keyFrame();
onGetH265(_h265frame);
return key;
}
default: // 4.4.1. Single NAL Unit Packets (p24)
//a full frame
_h265frame->buffer.assign("\x0\x0\x0\x1", 4);
_h265frame->buffer.append((char *)frame, length);
_h265frame->type = nal;
_h265frame->timeStamp = rtppack->timeStamp;
_h265frame->sequence = rtppack->sequence;
auto isIDR = _h265frame->keyFrame();
auto key = _h265frame->keyFrame();
onGetH265(_h265frame);
return (isIDR); //i frame
return key;
}
}
void H265RtpDecoder::onGetH265(const H265Frame::Ptr &frame) {
//write into the ring buffer
auto lastSeq = _h265frame->sequence;
RtpCodec::inputFrame(frame);
_h265frame = obtainFrame();
_h265frame->sequence = lastSeq;
}
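As with H264, the HEVC fragmentation unit keeps its S/E flags in a separate FU header, here the third payload byte (RFC 7798 layout; the example bytes are made up):

#include <cstdint>
#include <cstdio>

int main() {
    // HEVC FU payload: 2-byte NAL unit header (type 49 = FU) + 1-byte FU header
    uint8_t payload[] = {0x62, 0x01, 0x93}; // layer 0, TID 1; S=1, E=0, original nal type=19 (IDR_W_RADL)
    int nal_type = (payload[0] >> 1) & 0x3F; // 49 means fragmentation unit
    int start = (payload[2] >> 7) & 0x01;    // S bit
    int end   = (payload[2] >> 6) & 0x01;    // E bit
    int type  =  payload[2] & 0x3F;          // original NALU type (19 = IDR_W_RADL)
    // on the S packet the decoder rebuilds the 2-byte NALU header as (type << 1, 0x01), matching the code above
    printf("is_fu=%d S=%d E=%d type=%d\n", nal_type == 49, start, end, type);
    return 0;
}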

View File

@ -65,6 +65,7 @@ private:
H265Frame::Ptr obtainFrame();
private:
H265Frame::Ptr _h265frame;
int _lastSeq = 0;
};
/**

File diff suppressed because it is too large

View File

@ -7,6 +7,15 @@
#define QP_MAX_NUM (51 + 6*6) // The maximum supported qp
#define HEVC_MAX_SHORT_TERM_RPS_COUNT 64
#define T_PROFILE_HEVC_MAIN 1
#define T_PROFILE_HEVC_MAIN_10 2
#define T_PROFILE_HEVC_MAIN_STILL_PICTURE 3
#define T_PROFILE_HEVC_REXT 4
/**
* Chromaticity coordinates of the source primaries.
*/
@ -67,6 +76,62 @@ enum T_AVColorSpace {
};
enum {
// 7.4.3.1: vps_max_layers_minus1 is in [0, 62].
HEVC_MAX_LAYERS = 63,
// 7.4.3.1: vps_max_sub_layers_minus1 is in [0, 6].
HEVC_MAX_SUB_LAYERS = 7,
// 7.4.3.1: vps_num_layer_sets_minus1 is in [0, 1023].
HEVC_MAX_LAYER_SETS = 1024,
// 7.4.2.1: vps_video_parameter_set_id is u(4).
HEVC_MAX_VPS_COUNT = 16,
// 7.4.3.2.1: sps_seq_parameter_set_id is in [0, 15].
HEVC_MAX_SPS_COUNT = 16,
// 7.4.3.3.1: pps_pic_parameter_set_id is in [0, 63].
HEVC_MAX_PPS_COUNT = 64,
// A.4.2: MaxDpbSize is bounded above by 16.
HEVC_MAX_DPB_SIZE = 16,
// 7.4.3.1: vps_max_dec_pic_buffering_minus1[i] is in [0, MaxDpbSize - 1].
HEVC_MAX_REFS = HEVC_MAX_DPB_SIZE,
// 7.4.3.2.1: num_short_term_ref_pic_sets is in [0, 64].
HEVC_MAX_SHORT_TERM_REF_PIC_SETS = 64,
// 7.4.3.2.1: num_long_term_ref_pics_sps is in [0, 32].
HEVC_MAX_LONG_TERM_REF_PICS = 32,
// A.3: all profiles require that CtbLog2SizeY is in [4, 6].
HEVC_MIN_LOG2_CTB_SIZE = 4,
HEVC_MAX_LOG2_CTB_SIZE = 6,
// E.3.2: cpb_cnt_minus1[i] is in [0, 31].
HEVC_MAX_CPB_CNT = 32,
// A.4.1: in table A.6 the highest level allows a MaxLumaPs of 35 651 584.
HEVC_MAX_LUMA_PS = 35651584,
// A.4.1: pic_width_in_luma_samples and pic_height_in_luma_samples are
// constrained to be not greater than sqrt(MaxLumaPs * 8). Hence height/
// width are bounded above by sqrt(8 * 35651584) = 16888.2 samples.
HEVC_MAX_WIDTH = 16888,
HEVC_MAX_HEIGHT = 16888,
// A.4.1: table A.6 allows at most 22 tile rows for any level.
HEVC_MAX_TILE_ROWS = 22,
// A.4.1: table A.6 allows at most 20 tile columns for any level.
HEVC_MAX_TILE_COLUMNS = 20,
// 7.4.7.1: in the worst case (tiles_enabled_flag and
// entropy_coding_sync_enabled_flag are both set), entry points can be
// placed at the beginning of every Ctb row in every tile, giving an
// upper bound of (num_tile_columns_minus1 + 1) * PicHeightInCtbsY - 1.
// Only a stream with very high resolution and perverse parameters could
// get near that, though, so set a lower limit here with the maximum
// possible value for 4K video (at most 135 16x16 Ctb rows).
HEVC_MAX_ENTRY_POINT_OFFSETS = HEVC_MAX_TILE_COLUMNS * 135,
};
/**
* rational number numerator/denominator
*/
@ -170,6 +235,209 @@ typedef struct T_PPS {
int iChromaQpDiff;
} T_PPS;
typedef struct T_HEVCWindow {
unsigned int uiLeftOffset;
unsigned int uiRightOffset;
unsigned int uiTopOffset;
unsigned int uiBottomOffset;
} T_HEVCWindow;
typedef struct T_VUI {
T_AVRational tSar;
int iOverscanInfoPresentFlag;
int iOverscanAppropriateFlag;
int iVideoSignalTypePresentFlag;
int iVideoFormat;
int iVideoFullRangeFlag;
int iColourDescriptionPresentFlag;
uint8_t u8ColourPrimaries;
uint8_t u8TransferCharacteristic;
uint8_t u8MatrixCoeffs;
int iChromaLocInfoPresentFlag;
int iChromaSampleLocTypeTopField;
int iChromaSampleLocTypeBottomField;
int iNeutraChromaIndicationFlag;
int iFieldSeqFlag;
int iFrameFieldInfoPresentFlag;
int iDefaultDisplayWindowFlag;
T_HEVCWindow tDefDispWin;
int iVuiTimingInfoPresentFlag;
uint32_t u32VuiNumUnitsInTick;
uint32_t u32VuiTimeScale;
int iVuiPocProportionalToTimingFlag;
int iVuiNumTicksPocDiffOneMinus1;
int iVuiHrdParametersPresentFlag;
int iBitstreamRestrictionFlag;
int iTilesFixedStructureFlag;
int iMotionVectorsOverPicBoundariesFlag;
int iRestrictedRefPicListsFlag;
int iMinSpatialSegmentationIdc;
int iMaxBytesPerPicDenom;
int iMaxBitsPerMinCuDenom;
int iLog2MaxMvLengthHorizontal;
int iLog2MaxMvLengthVertical;
} T_VUI;
typedef struct T_PTLCommon {
uint8_t u8ProfileSpace;
uint8_t u8TierFlag;
uint8_t u8ProfileIdc;
uint8_t au8ProfileCompatibilityFlag[32];
uint8_t u8LevelIdc;
uint8_t u8ProgressiveSourceFlag;
uint8_t u8InterlacedSourceFlag;
uint8_t u8NonPackedConstraintFlag;
uint8_t u8FrameOnlyConstraintFlag;
} T_PTLCommon;
typedef struct T_PTL {
T_PTLCommon tGeneralPtl;
T_PTLCommon atSubLayerPtl[HEVC_MAX_SUB_LAYERS];
uint8_t au8SubLayerProfilePresentFlag[HEVC_MAX_SUB_LAYERS];
uint8_t au8SubLayerLevelPresentFlag[HEVC_MAX_SUB_LAYERS];
} T_PTL;
typedef struct T_ScalingList {
/* This is a little wasteful, since sizeID 0 only needs 8 coeffs,
* and size ID 3 only has 2 arrays, not 6. */
uint8_t aaau8Sl[4][6][64];
uint8_t aau8SlDc[2][6];
} T_ScalingList;
typedef struct T_ShortTermRPS {
unsigned int uiNumNegativePics;
int iNumDeltaPocs;
int iRpsIdxNumDeltaPocs;
int32_t au32DeltaPoc[32];
uint8_t au8Used[32];
} T_ShortTermRPS;
typedef struct T_HEVCVPS {
uint8_t u8VpsTemporalIdNestingFlag;
int iVpsMaxLayers;
int iVpsMaxSubLayers; ///< vps_max_temporal_layers_minus1 + 1
T_PTL tPtl;
int iVpsSubLayerOrderingInfoPresentFlag;
unsigned int uiVpsMaxDecPicBuffering[HEVC_MAX_SUB_LAYERS];
unsigned int auiVpsNumReorderPics[HEVC_MAX_SUB_LAYERS];
unsigned int auiVpsMaxLatencyIncrease[HEVC_MAX_SUB_LAYERS];
int iVpsMaxLayerId;
int iVpsNumLayerSets; ///< vps_num_layer_sets_minus1 + 1
uint8_t u8VpsTimingInfoPresentFlag;
uint32_t u32VpsNumUnitsInTick;
uint32_t u32VpsTimeScale;
uint8_t u8VpsPocProportionalToTimingFlag;
int iVpsNumTicksPocDiffOne; ///< vps_num_ticks_poc_diff_one_minus1 + 1
int iVpsNumHrdParameters;
} T_HEVCVPS;
typedef struct T_HEVCSPS {
unsigned int uiVpsId;
int iChromaFormatIdc;
uint8_t u8SeparateColourPlaneFlag;
///< output (i.e. cropped) values
int iIutputWidth, iOutputHeight;
T_HEVCWindow tOutputWindow;
T_HEVCWindow tPicConfWin;
int iBitDepth;
int iBitDepthChroma;
int iPixelShift;
unsigned int uiLog2MaxPocLsb;
int iPcmEnabledFlag;
int iMaxSubLayers;
struct {
int iMaxDecPicBuffering;
int iNumReorderPics;
int iMaxLatencyIncrease;
} stTemporalLayer[HEVC_MAX_SUB_LAYERS];
uint8_t u8temporalIdNestingFlag;
T_VUI tVui;
T_PTL tPtl;
uint8_t u8ScalingListEnableFlag;
T_ScalingList tScalingList;
unsigned int uiNbStRps;
T_ShortTermRPS atStRps[HEVC_MAX_SHORT_TERM_RPS_COUNT];
uint8_t u8AmpEnabledFlag;
uint8_t u8SaoEnabled;
uint8_t u8LongTermRefPicsPresentFlag;
uint16_t au16LtRefPicPocLsbSps[32];
uint8_t au8UsedByCurrPicLtSpsFlag[32];
uint8_t u8NumLongTermRefPicsSps;
struct {
uint8_t u8BitDepth;
uint8_t u8BitDepthChroma;
unsigned int uiLog2MinPcmCbSize;
unsigned int uiLog2MaxPcmCbSize;
uint8_t u8LoopFilterDisableFlag;
} pcm;
uint8_t u8SpsTemporalMvpEnabledFlag;
uint8_t u8SpsStrongIntraMmoothingEnableFlag;
unsigned int uiLog2MinCbSize;
unsigned int uiLog2DiffMaxMinCodingBlockSize;
unsigned int uiLog2MinTbSize;
unsigned int uiLog2MaxTrafoSize;
unsigned int uiLog2CtbSize;
unsigned int uiLog2MinPuSize;
int iMaxTransformHierarchyDepthInter;
int iMaxTransformHierarchyDepthIntra;
int iTransformSkipRotationEnabledFlag;
int iTransformSkipContextEnabledFlag;
int iImplicitRdpcmEnabledFlag;
int iExplicitRdpcmEnabledFlag;
int iIntraSmoothingDisabledFlag;
int iHighPrecisionOffsetsEnabledFlag;
int iPersistentRiceAdaptationEnabledFlag;
///< coded frame dimension in various units
int iWidth;
int iHeight;
int iCtbWidth;
int iCtbHeight;
int iCtbSize;
int iMinCbWidth;
int iMinCbHeight;
int iMinTbWidth;
int iMinTbHeight;
int iMinPuWidth;
int iMinPuHeight;
int iTbMask;
int aiHshift[3];
int aiVshift[3];
int iQpBdOffset;
int iVuiPresent;
}T_HEVCSPS;
typedef struct T_GetBitContext{
uint8_t *pu8Buf; /*points to the SPS start*/
int iBufSize; /*SPS length*/
@ -180,8 +448,15 @@ typedef struct T_GetBitContext{
int h264DecSeqParameterSet(void *pvBuf, T_SPS *ptSps);
int h265DecSeqParameterSet( void *pvBufSrc, T_HEVCSPS *ptSps );
int h265DecVideoParameterSet( void *pvBufSrc, T_HEVCVPS *ptVps );
void h264GetWidthHeight(T_SPS *ptSps, int *piWidth, int *piHeight);
void h265GetWidthHeight(T_HEVCSPS *ptSps, int *piWidth, int *piHeight);
void h264GeFramerate(T_SPS *ptSps, float *pfFramerate);
void h265GeFramerate(T_HEVCVPS *ptVps, T_HEVCSPS *ptSps,float *pfFramerate);
#if defined (__cplusplus)
}

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -212,11 +212,12 @@ inline bool HttpSession::checkWebSocket(){
headerOut["Sec-WebSocket-Protocol"] = _parser["Sec-WebSocket-Protocol"];
}
sendResponse("101 Switching Protocols",headerOut,"");
checkLiveFlvStream(true);
return true;
}
//http-flv url format: http://vhost-url:port/app/streamid.flv?key1=value1&key2=value2
//if the url suffix (ignoring '?' and everything after it) is .flv, the url is treated as an http-flv live stream.
inline bool HttpSession::checkLiveFlvStream(){
inline bool HttpSession::checkLiveFlvStream(bool over_websocket){
auto pos = strrchr(_parser.Url().data(),'.');
if(!pos){
//未找到".flv"后缀
@ -239,7 +240,7 @@ inline bool HttpSession::checkLiveFlvStream(){
bool bClose = (strcasecmp(_parser["Connection"].data(),"close") == 0) || ( ++_iReqCnt > reqCnt);
weak_ptr<HttpSession> weakSelf = dynamic_pointer_cast<HttpSession>(shared_from_this());
MediaSource::findAsync(_mediaInfo,weakSelf.lock(), true,[weakSelf,bClose,this](const MediaSource::Ptr &src){
MediaSource::findAsync(_mediaInfo,weakSelf.lock(), true,[weakSelf,bClose,this,over_websocket](const MediaSource::Ptr &src){
auto strongSelf = weakSelf.lock();
if(!strongSelf){
//this object has already been destroyed
@ -248,29 +249,35 @@ inline bool HttpSession::checkLiveFlvStream(){
auto rtmp_src = dynamic_pointer_cast<RtmpMediaSource>(src);
if(!rtmp_src){
//stream not found
sendNotFound(bClose);
if(!over_websocket){
sendNotFound(bClose);
}
if(bClose){
shutdown(SockException(Err_shutdown,"flv stream not found"));
}
return;
}
//stream found
auto onRes = [this,rtmp_src](const string &err){
auto onRes = [this,rtmp_src,over_websocket](const string &err){
bool authSuccess = err.empty();
if(!authSuccess){
sendResponse("401 Unauthorized", makeHttpHeader(true,err.size()),err);
if(!over_websocket){
sendResponse("401 Unauthorized", makeHttpHeader(true,err.size()),err);
}
shutdown(SockException(Err_shutdown,StrPrinter << "401 Unauthorized:" << err));
return ;
}
//rtmp source found; send the http header, the payload follows later
sendResponse("200 OK", makeHttpHeader(false,0,get_mime_type(".flv")), "");
if(!over_websocket) {
//rtmp source found; send the http header, the payload follows later
sendResponse("200 OK", makeHttpHeader(false, 0, get_mime_type(".flv")), "");
}
//start sending the rtmp payload
//turn off tcp_nodelay to optimize throughput
SockUtil::setNoDelay(_sock->rawFD(),false);
(*this) << SocketFlags(kSockFlags);
_flv_over_websocket = over_websocket;
try{
start(getPoller(),rtmp_src);
}catch (std::exception &ex){
@ -473,7 +480,7 @@ inline void HttpSession::Handle_Req_GET(int64_t &content_len) {
}
//再看看是否为http-flv直播请求
if(checkLiveFlvStream()){
if(checkLiveFlvStream(false)){
return;
}
@ -936,8 +943,23 @@ inline void HttpSession::sendNotFound(bool bClose) {
void HttpSession::onWrite(const Buffer::Ptr &buffer) {
_ticker.resetTime();
_ui64TotalBytes += buffer->size();
send(buffer);
if(!_flv_over_websocket){
_ui64TotalBytes += buffer->size();
send(buffer);
return;
}
WebSocketHeader header;
header._fin = true;
header._reserved = 0;
header._opcode = WebSocketHeader::BINARY;
header._mask_flag = false;
WebSocketSplitter::encode(header,(uint8_t *)buffer->data(),buffer->size());
}
void HttpSession::onWebSocketEncodeData(const uint8_t *ptr,uint64_t len){
_ui64TotalBytes += len;
SocketHelper::send((char *)ptr,len);
}
void HttpSession::onDetach() {

View File

@ -102,10 +102,16 @@ protected:
WebSocketSplitter::decode((uint8_t *)data,len);
}
/**
* callback invoked after the data has been packed into websocket frames
* @param ptr
* @param len
*/
void onWebSocketEncodeData(const uint8_t *ptr,uint64_t len) override;
private:
inline void Handle_Req_GET(int64_t &content_len);
inline void Handle_Req_POST(int64_t &content_len);
inline bool checkLiveFlvStream();
inline bool checkLiveFlvStream(bool over_websocket = false);
inline bool checkWebSocket();
inline bool emitHttpEvent(bool doInvoke);
inline void urlDecode(Parser &parser);
@ -148,6 +154,7 @@ private:
MediaInfo _mediaInfo;
//callback for handling content data
function<bool (const char *data,uint64_t len) > _contentCallBack;
bool _flv_over_websocket = false;
};

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

src/MediaFile/MP4Muxer.cpp (new file, 282 lines)
View File

@ -0,0 +1,282 @@
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#ifdef ENABLE_MP4RECORD
#include "MP4Muxer.h"
#include "Util/File.h"
#include "Common/config.h"
namespace mediakit{
#if defined(_WIN32) || defined(_WIN64)
#define fseek64 _fseeki64
#define ftell64 _ftelli64
#else
#define fseek64 fseek
#define ftell64 ftell
#endif
void MP4MuxerBase::init(int flags) {
static struct mov_buffer_t s_io = {
[](void* ctx, void* data, uint64_t bytes) {
MP4MuxerBase *thiz = (MP4MuxerBase *)ctx;
return thiz->onRead(data,bytes);
},
[](void* ctx, const void* data, uint64_t bytes){
MP4MuxerBase *thiz = (MP4MuxerBase *)ctx;
return thiz->onWrite(data,bytes);
},
[](void* ctx, uint64_t offset) {
MP4MuxerBase *thiz = (MP4MuxerBase *)ctx;
return thiz->onSeek(offset);
},
[](void* ctx){
MP4MuxerBase *thiz = (MP4MuxerBase *)ctx;
return thiz->onTell();
}
};
_mov_writter.reset(mov_writer_create(&s_io,this,flags),[](mov_writer_t *ptr){
if(ptr){
mov_writer_destroy(ptr);
}
});
}
///////////////////////////////////
void MP4Muxer::onTrackFrame(const Frame::Ptr &frame) {
if(frame->configFrame()){
//ignore config frames
return;
}
auto it = _codec_to_trackid.find(frame->getCodecId());
if(it == _codec_to_trackid.end()){
//the track does not exist or failed to initialize
return;
}
if(!_started){
//not started yet
if(frame->getTrackType() != TrackVideo || !frame->keyFrame()){
//if the first frame is audio, or is video but not an i-frame, we cannot start writing the file yet
return;
}
//start writing the file
_started = true;
}
int with_nalu_size;
switch (frame->getCodecId()){
case CodecH264:
case CodecH265:
//the h264/h265 we feed in does not carry the leading four bytes that give the nalu length
with_nalu_size = 0;
break;
default:
//aac and other frame types do not need the 4 nalu_size bytes prepended
with_nalu_size = 1;
break;
}
//mp4 file timestamps need to start from 0
auto &track_info = it->second;
int64_t dts_out, pts_out;
track_info.stamp.revise(frame->dts(),frame->pts(),dts_out,pts_out);
mov_writer_write_l(_mov_writter.get(),
track_info.track_id,
frame->data() + frame->prefixSize(),
frame->size() - frame->prefixSize(),
pts_out,
dts_out,
frame->keyFrame() ? MOV_AV_FLAG_KEYFREAME : 0,
with_nalu_size);
}
void MP4Muxer::onTrackReady(const Track::Ptr &track) {
switch (track->getCodecId()) {
case CodecAAC: {
auto aac_track = dynamic_pointer_cast<AACTrack>(track);
if (!aac_track) {
WarnL << "不是AAC Track";
return;
}
auto track_id = mov_writer_add_audio(_mov_writter.get(),
MOV_OBJECT_AAC,
aac_track->getAudioChannel(),
aac_track->getAudioSampleBit() * aac_track->getAudioChannel(),
aac_track->getAudioSampleRate(),
aac_track->getAacCfg().data(), 2);
if(track_id < 0){
WarnL << "添加AAC Track失败:" << track_id;
return;
}
track_info info;
info.track_id = track_id;
_codec_to_trackid[track->getCodecId()] = info;
}
break;
case CodecH264: {
auto h264_track = dynamic_pointer_cast<H264Track>(track);
if (!h264_track) {
WarnL << "不是H264 Track";
return;
}
struct mpeg4_avc_t avc;
string sps_pps = string("\x00\x00\x00\x01", 4) + h264_track->getSps() +
string("\x00\x00\x00\x01", 4) + h264_track->getPps();
h264_annexbtomp4(&avc, sps_pps.data(), sps_pps.size(), NULL, 0, NULL);
uint8_t extra_data[1024];
int extra_data_size = mpeg4_avc_decoder_configuration_record_save(&avc, extra_data, sizeof(extra_data));
if (extra_data_size == -1) {
WarnL << "生成H264 extra_data 失败";
return;
}
auto track_id = mov_writer_add_video(_mov_writter.get(),
MOV_OBJECT_H264,
h264_track->getVideoWidth(),
h264_track->getVideoHeight(),
extra_data,
extra_data_size);
if(track_id < 0){
WarnL << "添加H264 Track失败:" << track_id;
return;
}
track_info info;
info.track_id = track_id;
_codec_to_trackid[track->getCodecId()] = info;
}
break;
case CodecH265: {
auto h265_track = dynamic_pointer_cast<H265Track>(track);
if (!h265_track) {
WarnL << "不是H265 Track";
return;
}
struct mpeg4_hevc_t hevc;
string vps_sps_pps = string("\x00\x00\x00\x01", 4) + h265_track->getVps() +
string("\x00\x00\x00\x01", 4) + h265_track->getSps() +
string("\x00\x00\x00\x01", 4) + h265_track->getPps();
h265_annexbtomp4(&hevc, vps_sps_pps.data(), vps_sps_pps.size(), NULL, 0, NULL);
uint8_t extra_data[1024];
int extra_data_size = mpeg4_hevc_decoder_configuration_record_save(&hevc, extra_data, sizeof(extra_data));
if (extra_data_size == -1) {
WarnL << "生成H265 extra_data 失败";
return;
}
auto track_id = mov_writer_add_video(_mov_writter.get(),
MOV_OBJECT_HEVC,
h265_track->getVideoWidth(),
h265_track->getVideoHeight(),
extra_data,
extra_data_size);
if(track_id < 0){
WarnL << "添加H265 Track失败:" << track_id;
return;
}
track_info info;
info.track_id = track_id;
_codec_to_trackid[track->getCodecId()] = info;
}
break;
default:
WarnL << "MP4录制不支持该编码格式:" << track->getCodecId();
break;
}
}
MP4MuxerFile::MP4MuxerFile(const char *file) {
//create the file
auto fp = File::createfile_file(file,"wb+");
if(!fp){
throw std::runtime_error(string("failed to open file: ") + file);
}
GET_CONFIG(uint32_t,mp4BufSize,Record::kFileBufSize);
//allocate a file io buffer
std::shared_ptr<char> file_buf(new char[mp4BufSize],[](char *ptr){
if(ptr){
delete [] ptr;
}
});
if(file_buf){
//install the file io buffer
setvbuf(fp, file_buf.get(), _IOFBF, mp4BufSize);
}
//wrap the FILE* in a smart pointer
_file.reset(fp,[file_buf](FILE *fp) {
fclose(fp);
});
init(MOV_FLAG_FASTSTART);
}
MP4MuxerFile::~MP4MuxerFile() {
_mov_writter = nullptr;
}
int MP4MuxerFile::onRead(void *data, uint64_t bytes) {
if (bytes == fread(data, 1, bytes, _file.get())){
return 0;
}
return 0 != ferror(_file.get()) ? ferror(_file.get()) : -1 /*EOF*/;
}
int MP4MuxerFile::onWrite(const void *data, uint64_t bytes) {
return bytes == fwrite(data, 1, bytes, _file.get()) ? 0 : ferror(_file.get());
}
#if defined(_WIN32) || defined(_WIN64)
#define fseek64 _fseeki64
#define ftell64 _ftelli64
#else
#define fseek64 fseek
#define ftell64 ftell
#endif
int MP4MuxerFile::onSeek(uint64_t offset) {
return fseek64(_file.get(), offset, SEEK_SET);
}
uint64_t MP4MuxerFile::onTell() {
return ftell64(_file.get());
}
}//namespace mediakit
#endif//#ifdef ENABLE_MP4RECORD
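A condensed usage sketch of the new muxer (the track and frame sources are placeholders; Mp4Maker below drives it in exactly this way):

#include <list>
#include <memory>
#include "MP4Muxer.h"

void record(const std::list<mediakit::Track::Ptr> &tracks, const mediakit::Frame::Ptr &frame) {
    // the constructor throws if the file cannot be created
    auto muxer = std::make_shared<mediakit::MP4MuxerFile>("/tmp/record.mp4");
    for (auto &track : tracks) {
        muxer->addTrack(track);   // MediaSink API: register the track; onTrackReady fires once it is ready
    }
    muxer->inputFrame(frame);     // MediaSink API: frames are written once all tracks are ready
    muxer.reset();                // destroying the muxer finalizes and closes the file (what asyncClose offloads)
}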

src/MediaFile/MP4Muxer.h (new file, 105 lines)
View File

@ -0,0 +1,105 @@
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#ifndef ZLMEDIAKIT_MP4MUXER_H
#define ZLMEDIAKIT_MP4MUXER_H
#ifdef ENABLE_MP4RECORD
#include "Common/MediaSink.h"
#include "mov-writer.h"
#include "mpeg4-hevc.h"
#include "mpeg4-avc.h"
#include "mpeg4-aac.h"
#include "mov-buffer.h"
#include "mov-format.h"
#include "Extension/AAC.h"
#include "Extension/H264.h"
#include "Extension/H265.h"
#include "Stamp.h"
namespace mediakit{
class MP4MuxerBase{
public:
MP4MuxerBase() = default;
virtual ~MP4MuxerBase() = default;
protected:
virtual int onRead(void* data, uint64_t bytes) = 0;
virtual int onWrite(const void* data, uint64_t bytes) = 0;
virtual int onSeek( uint64_t offset) = 0;
virtual uint64_t onTell() = 0;
void init(int flags);
protected:
std::shared_ptr<mov_writer_t> _mov_writter;
};
class MP4Muxer : public MediaSink , public MP4MuxerBase{
public:
MP4Muxer() = default;
~MP4Muxer() override = default;
protected:
/**
* called when a track is ready (its ready() returns true), i.e. information such as sps/pps has been obtained
* @param track
*/
void onTrackReady(const Track::Ptr & track) override;
/**
* called when a ready track outputs a frame; only invoked after onAllTrackReady has fired
* @param frame
*/
void onTrackFrame(const Frame::Ptr &frame) override;
private:
struct track_info{
int track_id = -1;
Stamp stamp;
};
unordered_map<int,track_info> _codec_to_trackid;
bool _started = false;
};
class MP4MuxerFile : public MP4Muxer {
public:
typedef std::shared_ptr<MP4MuxerFile> Ptr;
MP4MuxerFile(const char *file);
~MP4MuxerFile();
protected:
int onRead(void* data, uint64_t bytes) override;
int onWrite(const void* data, uint64_t bytes) override;
int onSeek( uint64_t offset) override;
uint64_t onTell() override ;
private:
std::shared_ptr<FILE> _file;
};
}//namespace mediakit
#endif//#ifdef ENABLE_MP4RECORD
#endif //ZLMEDIAKIT_MP4MUXER_H

View File

@ -106,11 +106,11 @@ void MediaRecorder::addTrack(const Track::Ptr &track) {
}
#endif //defined(ENABLE_HLS)
#if defined(ENABLE_MP4V2)
#if defined(ENABLE_MP4RECORD)
if (_mp4Maker) {
_mp4Maker->addTrack(track);
}
#endif //defined(ENABLE_MP4V2)
#endif //defined(ENABLE_MP4RECORD)
}
} /* namespace mediakit */

View File

@ -64,9 +64,9 @@ private:
std::shared_ptr<HlsRecorder> _hlsMaker;
#endif //defined(ENABLE_HLS)
#if defined(ENABLE_MP4V2)
#if defined(ENABLE_MP4RECORD)
std::shared_ptr<Mp4Maker> _mp4Maker;
#endif //defined(ENABLE_MP4V2)
#endif //defined(ENABLE_MP4RECORD)
};
} /* namespace mediakit */

View File

@ -24,18 +24,13 @@
* SOFTWARE.
*/
#ifdef ENABLE_MP4V2
#ifdef ENABLE_MP4RECORD
#include <ctime>
#include <sys/stat.h>
#include "Common/config.h"
#include "Mp4Maker.h"
#include "MediaRecorder.h"
#include "Util/File.h"
#include "Util/mini.h"
#include "Util/util.h"
#include "Util/NoticeCenter.h"
#include "Extension/H264.h"
#include "Extension/AAC.h"
#include "Thread/WorkThreadPool.h"
using namespace toolkit;
@ -62,88 +57,19 @@ Mp4Maker::Mp4Maker(const string& strPath,
const string &strVhost,
const string &strApp,
const string &strStreamId) {
DebugL << strPath;
_strPath = strPath;
/////record business logic//////
_info.strAppName = strApp;
_info.strStreamId = strStreamId;
_info.strVhost = strVhost;
_info.strFolder = strPath;
//----record business logic----//
}
Mp4Maker::~Mp4Maker() {
closeFile();
}
void Mp4Maker::inputH264(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp){
auto iType = H264_TYPE(((uint8_t*)pData)[0]);
switch (iType) {
case H264Frame::NAL_B_P: //P
case H264Frame::NAL_IDR: { //IDR
if (_strLastVideo.size()) {
int64_t iTimeInc = (int64_t)ui32TimeStamp - (int64_t)_ui32LastVideoTime;
iTimeInc = MAX(0,MIN(iTimeInc,500));
if(iTimeInc == 0 || iTimeInc == 500){
WarnL << "abnormal time stamp increment:" << ui32TimeStamp << " " << _ui32LastVideoTime;
}
inputH264_l((char *) _strLastVideo.data(), _strLastVideo.size(), iTimeInc);
}
uint32_t prefixe = htonl(ui32Length);
_strLastVideo.assign((char *) &prefixe, 4);
_strLastVideo.append((char *)pData,ui32Length);
_ui32LastVideoTime = ui32TimeStamp;
}
break;
default:
break;
}
}
void Mp4Maker::inputAAC(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp){
if (_strLastAudio.size()) {
int64_t iTimeInc = (int64_t)ui32TimeStamp - (int64_t)_ui32LastAudioTime;
iTimeInc = MAX(0,MIN(iTimeInc,500));
if(iTimeInc == 0 || iTimeInc == 500){
WarnL << "abnormal time stamp increment:" << ui32TimeStamp << " " << _ui32LastAudioTime;
}
inputAAC_l((char *) _strLastAudio.data(), _strLastAudio.size(), iTimeInc);
}
_strLastAudio.assign((char *)pData, ui32Length);
_ui32LastAudioTime = ui32TimeStamp;
}
void Mp4Maker::inputH264_l(void *pData, uint32_t ui32Length, uint32_t ui32Duration) {
GET_CONFIG(uint32_t,recordSec,Record::kFileSecond);
auto iType = H264_TYPE(((uint8_t*)pData)[4]);
if(iType == H264Frame::NAL_IDR && (_hMp4 == MP4_INVALID_FILE_HANDLE || _ticker.elapsedTime() > recordSec * 1000)){
//create a new MP4 file at an I-frame
//if the file has not been created yet, or it already exceeds 10 minutes, create a new one
createFile();
}
if (_hVideo != MP4_INVALID_TRACK_ID) {
MP4WriteSample(_hMp4, _hVideo, (uint8_t *) pData, ui32Length,ui32Duration * 90,0,iType == 5);
}
}
void Mp4Maker::inputAAC_l(void *pData, uint32_t ui32Length, uint32_t ui32Duration) {
GET_CONFIG(uint32_t,recordSec,Record::kFileSecond);
if (!_haveVideo && (_hMp4 == MP4_INVALID_FILE_HANDLE || _ticker.elapsedTime() > recordSec * 1000)) {
//create a new MP4 file at an I-frame
//if the file has not been created yet, or it already exceeds 10 minutes, create a new one
createFile();
}
if (_hAudio != MP4_INVALID_TRACK_ID) {
auto duration = ui32Duration * _audioSampleRate /1000.0;
MP4WriteSample(_hMp4, _hAudio, (uint8_t*)pData, ui32Length,duration,0,false);
}
}
void Mp4Maker::createFile() {
closeFile();
auto strDate = timeStr("%Y-%m-%d");
auto strTime = timeStr("%H-%M-%S");
auto strFileTmp = _strPath + strDate + "/." + strTime + ".mp4";
@ -153,76 +79,37 @@ void Mp4Maker::createFile() {
_info.ui64StartedTime = ::time(NULL);
_info.strFileName = strTime + ".mp4";
_info.strFilePath = strFile;
GET_CONFIG(string,appName,Record::kAppName);
_info.strUrl = appName + "/"
+ _info.strAppName + "/"
+ _info.strStreamId + "/"
+ strDate + "/"
+ strTime + ".mp4";
//----record business logic----//
#if !defined(_WIN32)
File::createfile_path(strFileTmp.data(), S_IRWXO | S_IRWXG | S_IRWXU);
#else
File::createfile_path(strFileTmp.data(), 0);
#endif
_hMp4 = MP4Create(strFileTmp.data());
if (_hMp4 == MP4_INVALID_FILE_HANDLE) {
WarnL << "创建MP4文件失败:" << strFileTmp;
return;
}
//MP4SetTimeScale(_hMp4, 90000);
_strFileTmp = strFileTmp;
_strFile = strFile;
_ticker.resetTime();
auto videoTrack = dynamic_pointer_cast<H264Track>(getTrack(TrackVideo));
if(videoTrack){
auto &sps = videoTrack->getSps();
auto &pps = videoTrack->getPps();
_hVideo = MP4AddH264VideoTrack(_hMp4,
90000,
MP4_INVALID_DURATION,
videoTrack->getVideoWidth(),
videoTrack->getVideoHeight(),
sps[1],
sps[2],
sps[3],
3);
if(_hVideo != MP4_INVALID_TRACK_ID){
MP4AddH264SequenceParameterSet(_hMp4, _hVideo, (uint8_t *)sps.data(), sps.size());
MP4AddH264PictureParameterSet(_hMp4, _hVideo, (uint8_t *)pps.data(), pps.size());
}else{
WarnL << "添加视频通道失败:" << strFileTmp;
}
}
auto audioTrack = dynamic_pointer_cast<AACTrack>(getTrack(TrackAudio));
if(audioTrack){
_audioSampleRate = audioTrack->getAudioSampleRate();
_hAudio = MP4AddAudioTrack(_hMp4, _audioSampleRate, MP4_INVALID_DURATION, MP4_MPEG4_AUDIO_TYPE);
if (_hAudio != MP4_INVALID_TRACK_ID) {
auto &cfg = audioTrack->getAacCfg();
MP4SetTrackESConfiguration(_hMp4, _hAudio,(uint8_t *)cfg.data(), cfg.size());
}else{
WarnL << "添加音频通道失败:" << strFileTmp;
try {
_muxer = std::make_shared<MP4MuxerFile>(strFileTmp.data());
for(auto &track :_tracks){
//add the track
_muxer->addTrack(track);
}
_strFileTmp = strFileTmp;
_strFile = strFile;
_createFileTicker.resetTime();
}catch(std::exception &ex) {
WarnL << ex.what();
}
}
void Mp4Maker::asyncClose() {
auto hMp4 = _hMp4;
auto muxer = _muxer;
auto strFileTmp = _strFileTmp;
auto strFile = _strFile;
auto info = _info;
WorkThreadPool::Instance().getExecutor()->async([hMp4,strFileTmp,strFile,info]() {
//record the file duration; doing it before MP4Close so the MP4Close execution time is ignored
WorkThreadPool::Instance().getExecutor()->async([muxer,strFileTmp,strFile,info]() {
//record the file duration; doing it before closing the mp4 so the close time is ignored
const_cast<Mp4Info&>(info).ui64TimeLen = ::time(NULL) - info.ui64StartedTime;
//MP4Close is very slow, so run it on a background thread
MP4Close(hMp4,MP4_CLOSE_DO_NOT_COMPUTE_BITRATE);
//closing the mp4 is very slow, so run it on a background thread
const_cast<MP4MuxerFile::Ptr &>(muxer).reset();
//rename the temp file to its final name, so the mp4 is not accessed before it is complete
rename(strFileTmp.data(),strFile.data());
//get the file size
@ -235,35 +122,38 @@ void Mp4Maker::asyncClose() {
}
void Mp4Maker::closeFile() {
if (_hMp4 != MP4_INVALID_FILE_HANDLE) {
if (_muxer) {
asyncClose();
_hMp4 = MP4_INVALID_FILE_HANDLE;
_hVideo = MP4_INVALID_TRACK_ID;
_hAudio = MP4_INVALID_TRACK_ID;
_muxer = nullptr;
}
}
void Mp4Maker::onTrackFrame(const Frame::Ptr &frame) {
switch (frame->getCodecId()){
case CodecH264:{
inputH264(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize(),frame->stamp());
}
break;
case CodecAAC:{
inputAAC(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize(),frame->stamp());
}
break;
GET_CONFIG(uint32_t,recordSec,Record::kFileSecond);
if(!_muxer || ((_createFileTicker.elapsedTime() > recordSec * 1000) &&
(!_haveVideo || (_haveVideo && frame->keyFrame()))) ){
//conditions for creating a new file:
//1. _muxer is null
//2. the slice duration has elapsed and the stream is audio-only
//3. the slice duration has elapsed, there is video, and a video key frame arrives
createFile();
}
default:
break;
if(_muxer){
//write to the mp4 file
_muxer->inputFrame(frame);
}
}
void Mp4Maker::onAllTrackReady() {
_haveVideo = getTrack(TrackVideo).operator bool();
void Mp4Maker::onTrackReady(const Track::Ptr & track){
//save all tracks in preparation for creating the MP4MuxerFile
_tracks.emplace_back(track);
if(track->getTrackType() == TrackVideo){
_haveVideo = true;
}
}
} /* namespace mediakit */
#endif //ENABLE_MP4V2
#endif //ENABLE_MP4RECORD

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
@ -27,19 +27,17 @@
#ifndef MP4MAKER_H_
#define MP4MAKER_H_
#ifdef ENABLE_MP4V2
#ifdef ENABLE_MP4RECORD
#include <mutex>
#include <memory>
#include <mp4v2/mp4v2.h>
#include "Player/PlayerBase.h"
#include "Util/util.h"
#include "Util/logger.h"
#include "Util/TimeTicker.h"
#include "Util/TimeTicker.h"
#include "Common/MediaSink.h"
#include "Extension/Track.h"
#include "MP4Muxer.h"
using namespace toolkit;
namespace mediakit {
@ -72,44 +70,29 @@ private:
*/
void onTrackFrame(const Frame::Ptr &frame) override ;
/**
* all Tracks are ready
*/
void onAllTrackReady() override;
/**
* called when a track is ready (its ready() returns true), i.e. information such as sps/pps has been obtained
* @param track
*/
void onTrackReady(const Track::Ptr & track) override;
private:
void createFile();
void closeFile();
void asyncClose();
//时间戳参考频率1000
void inputH264(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
//时间戳参考频率1000
void inputAAC(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
void inputH264_l(void *pData, uint32_t ui32Length, uint32_t ui64Duration);
void inputAAC_l(void *pData, uint32_t ui32Length, uint32_t ui64Duration);
private:
MP4FileHandle _hMp4 = MP4_INVALID_FILE_HANDLE;
MP4TrackId _hVideo = MP4_INVALID_TRACK_ID;
MP4TrackId _hAudio = MP4_INVALID_TRACK_ID;
string _strPath;
string _strFile;
string _strFileTmp;
Ticker _ticker;
string _strLastVideo;
string _strLastAudio;
uint32_t _ui32LastVideoTime = 0;
uint32_t _ui32LastAudioTime = 0;
Ticker _createFileTicker;
Mp4Info _info;
bool _haveVideo = false;
int _audioSampleRate;
MP4MuxerFile::Ptr _muxer;
list<Track::Ptr> _tracks;
};
} /* namespace mediakit */
#endif ///ENABLE_MP4V2
#endif ///ENABLE_MP4RECORD
#endif /* MP4MAKER_H_ */

src/MediaFile/Stamp.cpp (new file, 79 lines)
View File

@ -0,0 +1,79 @@
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#include "Stamp.h"
namespace mediakit {
void Stamp::revise(uint32_t dts, uint32_t pts, int64_t &dts_out, int64_t &pts_out) {
if(_first){
//record the first timestamp so later increments can be computed against it
_start_dts = dts;
_first = false;
_ticker = std::make_shared<SmoothTicker>();
}
//difference between pts and dts
int pts_dts_diff = pts - dts;
if(_modifyStamp){
dts = _ticker->elapsedTime();
}
//relative timestamp
dts_out = dts - _start_dts;
if(dts_out < _dts_inc){
//the current relative timestamp is smaller than the previous one?
if(dts_out < 0 || _dts_inc - dts_out > 0xFFFF){
//timestamp wrap-around: make sure the next relative timestamp keeps growing reasonably from this one
_start_dts = dts - _dts_inc;
//force this timestamp to equal the previous one
dts_out = _dts_inc;
}else{
//the timestamp went backwards? then reuse the previous timestamp
dts_out = _dts_inc;
}
}
//keep this relative timestamp so the next call can detect wrap-around or reordering
_dts_inc = dts_out;
//////////////below: computing the presentation (pts) timestamp//////////////////
if(!pts){
//no presentation timestamp given
pts = dts;
}
if(pts_dts_diff > 200 || pts_dts_diff < -200){
//if the difference exceeds 200 ms, assume a wrap-around has scrambled the timestamps
pts_dts_diff = 0;
}
pts_out = dts_out + pts_dts_diff;
if(pts_out < 0){
//the timestamp must not be negative
pts_out = 0;
}
}
}//namespace mediakit
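A tiny worked example of the rebasing logic above (input values are made up; the comments show the expected outputs):

#include <cstdio>
#include "Stamp.h"

int main() {
    mediakit::Stamp stamp;   // modifyStamp = false: keep the source pacing, only rebase to zero
    int64_t dts_out, pts_out;
    stamp.revise(100000, 100040, dts_out, pts_out); // first frame:  dts_out = 0,  pts_out = 40
    stamp.revise(100040, 100080, dts_out, pts_out); // next frame:   dts_out = 40, pts_out = 80
    stamp.revise(100020, 100020, dts_out, pts_out); // went backwards: dts_out is clamped to 40
    printf("last dts=%lld pts=%lld\n", (long long)dts_out, (long long)pts_out);
    return 0;
}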

src/MediaFile/Stamp.h (new file, 51 lines)
View File

@ -0,0 +1,51 @@
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#ifndef ZLMEDIAKIT_STAMP_H
#define ZLMEDIAKIT_STAMP_H
#include "Util/TimeTicker.h"
#include <cstdint>
using namespace toolkit;
namespace mediakit {
class Stamp {
public:
Stamp(bool modifyStamp = false) {_modifyStamp = modifyStamp;};
~Stamp() = default;
void revise(uint32_t dts, uint32_t pts, int64_t &dts_out, int64_t &pts_out);
private:
int64_t _start_dts = 0;
int64_t _dts_inc = 0;
bool _first = true;
bool _modifyStamp;
std::shared_ptr<SmoothTicker> _ticker;
};
}//namespace mediakit
#endif //ZLMEDIAKIT_STAMP_H

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
@ -41,54 +41,76 @@ TsMuxer::~TsMuxer() {
void TsMuxer::addTrack(const Track::Ptr &track) {
switch (track->getCodecId()){
case CodecH264:
_codecid_to_stream_id[CodecH264] = mpeg_ts_add_stream(_context,PSI_STREAM_H264, nullptr,0);
break;
case CodecH265:
_codecid_to_stream_id[CodecH265] = mpeg_ts_add_stream(_context,PSI_STREAM_H265, nullptr,0);
break;
case CodecAAC:
_codecid_to_stream_id[CodecAAC] = mpeg_ts_add_stream(_context,PSI_STREAM_AAC, nullptr,0);
break;
case CodecH264: {
track_info info;
info.track_id = mpeg_ts_add_stream(_context, PSI_STREAM_H264, nullptr, 0);
_codec_to_trackid[track->getCodecId()] = info;
} break;
case CodecH265: {
track_info info;
info.track_id = mpeg_ts_add_stream(_context, PSI_STREAM_H265, nullptr, 0);
_codec_to_trackid[track->getCodecId()] = info;
}break;
case CodecAAC: {
track_info info;
info.track_id = mpeg_ts_add_stream(_context, PSI_STREAM_AAC, nullptr, 0);
_codec_to_trackid[track->getCodecId()] = info;
}break;
default:
break;
}
}
void TsMuxer::inputFrame(const Frame::Ptr &frame) {
auto it = _codecid_to_stream_id.find(frame->getCodecId());
if(it == _codecid_to_stream_id.end()){
auto it = _codec_to_trackid.find(frame->getCodecId());
if(it == _codec_to_trackid.end()){
return;
}
//as with the mp4 file, timestamps need to start from 0
auto &track_info = it->second;
int64_t dts_out, pts_out;
switch (frame->getCodecId()){
case CodecH265:
case CodecH264: {
//the logic here packs frames with the same timestamp (SPS, PPS, IDR) together and treats them as a single frame
if (!_frameCached.empty() && _frameCached.back()->dts() != frame->dts()) {
Frame::Ptr back = _frameCached.back();
Buffer::Ptr merged_frame = back;
if(_frameCached.size() != 1){
string merged;
_frameCached.for_each([&](const Frame::Ptr &frame){
if(frame->prefixSize()){
merged.append(frame->data(),frame->size());
} else{
merged.append("\x00\x00\x00\x01",4);
merged.append(frame->data(),frame->size());
}
});
merged_frame = std::make_shared<BufferString>(std::move(merged));
}
_timestamp = back->dts();
mpeg_ts_write(_context, it->second, back->keyFrame() ? 0x0001 : 0, back->pts() * 90LL, back->dts() * 90LL, merged_frame->data(), merged_frame->size());
_frameCached.clear();
Buffer::Ptr merged_frame ;
if(frame->configFrame()){
//config frame: cache it and return, to be merged when the next key frame arrives
_config_frame_cache.append("\x00\x00\x00\x01",4);
_config_frame_cache.append(frame->data() + frame->prefixSize(),frame->size() - frame->prefixSize());
break;
}
_frameCached.emplace_back(Frame::getCacheAbleFrame(frame));
if(frame->keyFrame()){
//key frame
if(!_config_frame_cache.empty()){
//there are cached config frames: merge them with the key frame before feeding the ts packer
_config_frame_cache.append("\x00\x00\x00\x01",4);
_config_frame_cache.append(frame->data() + frame->prefixSize(),frame->size() - frame->prefixSize());
merged_frame = std::make_shared<BufferString>(std::move(_config_frame_cache));
_config_frame_cache.clear();
}else{
//a key frame that is not the first one (h265 has several key-frame types)
merged_frame = frame;
}
}else{
//ordinary frames such as B/P
merged_frame = frame;
//drop any cached config frames such as sps/pps
_config_frame_cache.clear();
}
//write into the ts file
track_info.stamp.revise(frame->dts(),frame->pts(),dts_out,pts_out);
_timestamp = dts_out;
mpeg_ts_write(_context, track_info.track_id, frame->keyFrame() ? 0x0001 : 0, pts_out * 90LL, dts_out * 90LL, merged_frame->data(), merged_frame->size());
}
break;
default: {
_timestamp = frame->dts();
mpeg_ts_write(_context, it->second, frame->keyFrame() ? 0x0001 : 0, frame->pts() * 90LL, frame->dts() * 90LL, frame->data(), frame->size());
track_info.stamp.revise(frame->dts(),frame->pts(),dts_out,pts_out);
_timestamp = dts_out;
mpeg_ts_write(_context, track_info.track_id, frame->keyFrame() ? 0x0001 : 0, pts_out * 90LL, dts_out * 90LL, frame->data(), frame->size());
}
break;
}
@ -124,7 +146,7 @@ void TsMuxer::uninit() {
mpeg_ts_destroy(_context);
_context = nullptr;
}
_codecid_to_stream_id.clear();
_codec_to_trackid.clear();
}
}//namespace mediakit

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
@ -31,6 +31,9 @@
#include "Extension/Frame.h"
#include "Extension/Track.h"
#include "Util/File.h"
#include "Common/MediaSink.h"
#include "Stamp.h"
using namespace toolkit;
namespace mediakit {
@ -39,8 +42,8 @@ class TsMuxer {
public:
TsMuxer();
virtual ~TsMuxer();
void addTrack(const Track::Ptr &track);
void inputFrame(const Frame::Ptr &frame);
void addTrack(const Track::Ptr &track) ;
void inputFrame(const Frame::Ptr &frame) ;
protected:
virtual void onTs(const void *packet, int bytes,uint32_t timestamp,int flags) = 0;
void resetTracks();
@ -51,8 +54,13 @@ private:
void *_context = nullptr;
char *_tsbuf[188];
uint32_t _timestamp = 0;
unordered_map<int,int > _codecid_to_stream_id;
List<Frame::Ptr> _frameCached;
struct track_info{
int track_id = -1;
Stamp stamp;
};
unordered_map<int,track_info> _codec_to_trackid;
string _config_frame_cache;
};
}//namespace mediakit

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
//
//
// Created by xzl on 2019/3/27.
//

View File

@ -1,4 +1,4 @@
//
//
// Created by xzl on 2019/3/27.
//

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -1,4 +1,4 @@
/*
/*
* MIT License
*
* Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>

View File

@ -239,8 +239,7 @@ int main(int argc,char *argv[]) {
//these are the pull-stream urls; rtmp/rtsp are supported and the payload must be H264+AAC
//other unrecognized audio/video is ignored (e.g. for h264+adpcm the audio is removed after forwarding)
auto urlList = {"rtmp://live.hkstv.hk.lxdns.com/live/hks1",
"rtmp://live.hkstv.hk.lxdns.com/live/hks2"
auto urlList = {"rtsp://184.72.239.149/vod/mp4:BigBuckBunny_115k.mov"
//rtsp urls support an embedded username and password
/*"rtsp://admin:jzan123456@192.168.0.122/"*/};
map<string, PlayerProxy::Ptr> proxyMap;