Merge pull request #1137 from ZLMediaKit/feature/add_mute_audio

夏楚 2021-09-27 16:55:19 +08:00 committed by GitHub
commit 16411e1af1
65 changed files with 537 additions and 430 deletions

View File

@ -46,8 +46,9 @@ API_EXPORT void API_CALL mk_media_release(mk_media ctx);
* @param width video width
* @param height video height
* @param fps video frame rate
* @return 1 on success, 0 on failure
*/
API_EXPORT void API_CALL mk_media_init_video(mk_media ctx, int codec_id, int width, int height, float fps);
API_EXPORT int API_CALL mk_media_init_video(mk_media ctx, int codec_id, int width, int height, float fps);
/**
* Add an audio track
@ -56,8 +57,15 @@ API_EXPORT void API_CALL mk_media_init_video(mk_media ctx, int codec_id, int wid
* @param channels number of audio channels
* @param sample_bit sample bit depth; only 16 is supported
* @param sample_rate sample rate in Hz
* @return 1 on success, 0 on failure
*/
API_EXPORT void API_CALL mk_media_init_audio(mk_media ctx, int codec_id, int sample_rate, int channels, int sample_bit);
API_EXPORT int API_CALL mk_media_init_audio(mk_media ctx, int codec_id, int sample_rate, int channels, int sample_bit);
/**
* Add a mute (silent AAC) audio track
* @param ctx object pointer
*/
API_EXPORT void API_CALL mk_media_init_mute_audio(mk_media ctx);
/**
* Call this function after h264/h265/aac initialization is complete
@ -74,8 +82,9 @@ API_EXPORT void API_CALL mk_media_init_complete(mk_media ctx);
* @param len number of bytes of H264 data
* @param dts decode timestamp in milliseconds
* @param pts presentation timestamp in milliseconds
* @return 1 on success, 0 on failure
*/
API_EXPORT void API_CALL mk_media_input_h264(mk_media ctx, const void *data, int len, uint32_t dts, uint32_t pts);
API_EXPORT int API_CALL mk_media_input_h264(mk_media ctx, const void *data, int len, uint32_t dts, uint32_t pts);
/**
* Input a single frame of H265 video; start codes 00 00 01 and 00 00 00 01 are both accepted
@ -84,8 +93,9 @@ API_EXPORT void API_CALL mk_media_input_h264(mk_media ctx, const void *data, int
* @param len number of bytes of H265 data
* @param dts decode timestamp in milliseconds
* @param pts presentation timestamp in milliseconds
* @return 1 on success, 0 on failure
*/
API_EXPORT void API_CALL mk_media_input_h265(mk_media ctx, const void *data, int len, uint32_t dts, uint32_t pts);
API_EXPORT int API_CALL mk_media_input_h265(mk_media ctx, const void *data, int len, uint32_t dts, uint32_t pts);
/**
* Input a single frame of AAC audio (the adts header is passed separately)
@ -94,8 +104,9 @@ API_EXPORT void API_CALL mk_media_input_h265(mk_media ctx, const void *data, int
* @param len number of bytes of AAC data
* @param dts decode timestamp in milliseconds
* @param adts adts header, may be null
* @return 1 on success, 0 on failure
*/
API_EXPORT void API_CALL mk_media_input_aac(mk_media ctx, const void *data, int len, uint32_t dts, void *adts);
API_EXPORT int API_CALL mk_media_input_aac(mk_media ctx, const void *data, int len, uint32_t dts, void *adts);
/**
* Input a single frame of PCM audio; only available when compiled with ENABLE_FAAC
@ -103,8 +114,9 @@ API_EXPORT void API_CALL mk_media_input_aac(mk_media ctx, const void *data, int
* @param data PCM data
* @param len number of bytes of PCM data
* @param pts presentation timestamp in milliseconds
* @return 1 on success, 0 on failure
*/
API_EXPORT void API_CALL mk_media_input_pcm(mk_media ctx, void *data, int len, uint32_t pts);
API_EXPORT int API_CALL mk_media_input_pcm(mk_media ctx, void *data, int len, uint32_t pts);
/**
* Input a single OPUS/G711 audio frame
@ -112,8 +124,9 @@ API_EXPORT void API_CALL mk_media_input_pcm(mk_media ctx, void *data, int len, u
* @param data audio data
* @param len number of bytes of audio data
* @param dts decode timestamp in milliseconds
* @return 1 on success, 0 on failure
*/
API_EXPORT void API_CALL mk_media_input_audio(mk_media ctx, const void* data, int len, uint32_t dts);
API_EXPORT int API_CALL mk_media_input_audio(mk_media ctx, const void* data, int len, uint32_t dts);
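Example (not part of this diff): a minimal sketch of driving the updated C API, assuming `ctx` was obtained from mk_media_create() beforehand; the codec id value and the frame variables (h264_data, h264_len, dts, pts) are illustrative assumptions.
// The init/input functions now return 1 on success and 0 on failure.
if (!mk_media_init_video(ctx, 0 /* H264 codec id, assumed */, 1920, 1080, 25)) {
    // video track rejected, e.g. unsupported codec id
}
// No real audio source available: add the new generated mute AAC track instead.
mk_media_init_mute_audio(ctx);
mk_media_init_complete(ctx);
int ok = mk_media_input_h264(ctx, h264_data, h264_len, dts, pts);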
/**
* MediaSource.close()

View File

@ -174,7 +174,7 @@ API_EXPORT void API_CALL mk_media_release(mk_media ctx) {
delete obj;
}
API_EXPORT void API_CALL mk_media_init_video(mk_media ctx, int codec_id, int width, int height, float fps){
API_EXPORT int API_CALL mk_media_init_video(mk_media ctx, int codec_id, int width, int height, float fps){
assert(ctx);
MediaHelper::Ptr *obj = (MediaHelper::Ptr *) ctx;
VideoInfo info;
@ -182,10 +182,10 @@ API_EXPORT void API_CALL mk_media_init_video(mk_media ctx, int codec_id, int wid
info.iFrameRate = fps;
info.iWidth = width;
info.iHeight = height;
(*obj)->getChannel()->initVideo(info);
return (*obj)->getChannel()->initVideo(info);
}
API_EXPORT void API_CALL mk_media_init_audio(mk_media ctx, int codec_id, int sample_rate, int channels, int sample_bit){
API_EXPORT int API_CALL mk_media_init_audio(mk_media ctx, int codec_id, int sample_rate, int channels, int sample_bit){
assert(ctx);
MediaHelper::Ptr *obj = (MediaHelper::Ptr *) ctx;
AudioInfo info;
@ -193,7 +193,13 @@ API_EXPORT void API_CALL mk_media_init_audio(mk_media ctx, int codec_id, int sam
info.iSampleRate = sample_rate;
info.iChannel = channels;
info.iSampleBit = sample_bit;
(*obj)->getChannel()->initAudio(info);
return (*obj)->getChannel()->initAudio(info);
}
API_EXPORT void API_CALL mk_media_init_mute_audio(mk_media ctx) {
assert(ctx);
MediaHelper::Ptr *obj = (MediaHelper::Ptr *) ctx;
(*obj)->getChannel()->addMuteAudioTrack();
}
API_EXPORT void API_CALL mk_media_init_complete(mk_media ctx){
@ -202,34 +208,34 @@ API_EXPORT void API_CALL mk_media_init_complete(mk_media ctx){
(*obj)->getChannel()->addTrackCompleted();
}
API_EXPORT void API_CALL mk_media_input_h264(mk_media ctx, const void *data, int len, uint32_t dts, uint32_t pts) {
API_EXPORT int API_CALL mk_media_input_h264(mk_media ctx, const void *data, int len, uint32_t dts, uint32_t pts) {
assert(ctx && data && len > 0);
MediaHelper::Ptr *obj = (MediaHelper::Ptr *) ctx;
(*obj)->getChannel()->inputH264((const char *) data, len, dts, pts);
return (*obj)->getChannel()->inputH264((const char *) data, len, dts, pts);
}
API_EXPORT void API_CALL mk_media_input_h265(mk_media ctx, const void *data, int len, uint32_t dts, uint32_t pts) {
API_EXPORT int API_CALL mk_media_input_h265(mk_media ctx, const void *data, int len, uint32_t dts, uint32_t pts) {
assert(ctx && data && len > 0);
MediaHelper::Ptr *obj = (MediaHelper::Ptr *) ctx;
(*obj)->getChannel()->inputH265((const char *) data, len, dts, pts);
return (*obj)->getChannel()->inputH265((const char *) data, len, dts, pts);
}
API_EXPORT void API_CALL mk_media_input_aac(mk_media ctx, const void *data, int len, uint32_t dts, void *adts) {
API_EXPORT int API_CALL mk_media_input_aac(mk_media ctx, const void *data, int len, uint32_t dts, void *adts) {
assert(ctx && data && len > 0 && adts);
MediaHelper::Ptr *obj = (MediaHelper::Ptr *) ctx;
(*obj)->getChannel()->inputAAC((const char *) data, len, dts, (char *) adts);
return (*obj)->getChannel()->inputAAC((const char *) data, len, dts, (char *) adts);
}
API_EXPORT void API_CALL mk_media_input_pcm(mk_media ctx, void *data , int len, uint32_t pts){
API_EXPORT int API_CALL mk_media_input_pcm(mk_media ctx, void *data , int len, uint32_t pts){
assert(ctx && data && len > 0);
MediaHelper::Ptr* obj = (MediaHelper::Ptr*) ctx;
(*obj)->getChannel()->inputPCM((char*)data, len, pts);
return (*obj)->getChannel()->inputPCM((char*)data, len, pts);
}
API_EXPORT void API_CALL mk_media_input_audio(mk_media ctx, const void* data, int len, uint32_t dts){
API_EXPORT int API_CALL mk_media_input_audio(mk_media ctx, const void* data, int len, uint32_t dts){
assert(ctx && data && len > 0);
MediaHelper::Ptr* obj = (MediaHelper::Ptr*) ctx;
(*obj)->getChannel()->inputAudio((const char*)data, len, dts);
return (*obj)->getChannel()->inputAudio((const char*)data, len, dts);
}
API_EXPORT void API_CALL mk_media_start_send_rtp(mk_media ctx, const char *dst_url, uint16_t dst_port, const char *ssrc, int is_udp, on_mk_media_send_rtp_result cb, void *user_data){

View File

@ -77,10 +77,11 @@ public:
//Playback succeeded; add the event callback
weak_ptr<MediaPlayerForC> weak_self = shared_from_this();
auto delegate = std::make_shared<FrameWriterInterfaceHelper>([weak_self](const Frame::Ptr &frame) {
auto strong_self = weak_self.lock();
if (strong_self) {
if (auto strong_self = weak_self.lock()) {
strong_self->onData(frame);
return true;
}
return false;
});
for (auto &track : _player->getTracks(false)) {
track->addDelegate(delegate);

View File

@ -254,30 +254,29 @@ const AVCodecContext *FFmpegDecoder::getContext() const {
return _context.get();
}
void FFmpegDecoder::inputFrame_l(const Frame::Ptr &frame) {
bool FFmpegDecoder::inputFrame_l(const Frame::Ptr &frame) {
if (_do_merger) {
_merger.inputFrame(frame, [&](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer, bool have_idr) {
return _merger.inputFrame(frame, [&](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer, bool have_idr) {
decodeFrame(buffer->data(), buffer->size(), dts, pts);
});
} else {
decodeFrame(frame->data(), frame->size(), frame->dts(), frame->pts());
}
return decodeFrame(frame->data(), frame->size(), frame->dts(), frame->pts());
}
void FFmpegDecoder::inputFrame(const Frame::Ptr &frame) {
bool FFmpegDecoder::inputFrame(const Frame::Ptr &frame) {
if (!TaskManager::isEnabled()) {
inputFrame_l(frame);
} else {
return inputFrame_l(frame);
}
auto frame_cache = Frame::getCacheAbleFrame(frame);
addDecodeTask(frame->keyFrame(), [this, frame_cache]() {
inputFrame_l(frame_cache);
//Simulate active frame dropping caused by decoding being too slow
//usleep(100 * 1000);
});
}
return true;
}
void FFmpegDecoder::decodeFrame(const char *data, size_t size, uint32_t dts, uint32_t pts) {
bool FFmpegDecoder::decodeFrame(const char *data, size_t size, uint32_t dts, uint32_t pts) {
TimeTicker2(30, TraceL);
auto pkt = alloc_av_packet();
@ -291,7 +290,7 @@ void FFmpegDecoder::decodeFrame(const char *data, size_t size, uint32_t dts, uin
if (ret != AVERROR_INVALIDDATA) {
WarnL << "avcodec_send_packet failed:" << ffmpeg_err(ret);
}
return;
return false;
}
while (true) {
@ -311,6 +310,7 @@ void FFmpegDecoder::decodeFrame(const char *data, size_t size, uint32_t dts, uin
}
onDecode(out_frame);
}
return true;
}
void FFmpegDecoder::setOnDecode(FFmpegDecoder::onDec cb) {

View File

@ -95,15 +95,15 @@ public:
FFmpegDecoder(const Track::Ptr &track);
~FFmpegDecoder();
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
void setOnDecode(onDec cb);
void flush();
const AVCodecContext *getContext() const;
private:
void onDecode(const FFmpegFrame::Ptr &frame);
void inputFrame_l(const Frame::Ptr &frame);
void decodeFrame(const char *data, size_t size, uint32_t dts, uint32_t pts);
bool inputFrame_l(const Frame::Ptr &frame);
bool decodeFrame(const char *data, size_t size, uint32_t dts, uint32_t pts);
private:
bool _do_merger = false;
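Example (not from this commit): a sketch of forwarding the decoder's new bool result through a track delegate, assuming `track` is a ready video Track::Ptr and that setOnDecode's callback receives an FFmpegFrame::Ptr.
auto decoder = std::make_shared<FFmpegDecoder>(track);
decoder->setOnDecode([](const FFmpegFrame::Ptr &pix) {
    // consume the decoded picture here
});
track->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([decoder](const Frame::Ptr &frame) {
    // propagate whether the frame was actually accepted for decoding
    return decoder->inputFrame(frame);
}));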

View File

@ -84,7 +84,7 @@ int main(int argc, char *argv[]) {
});
});
auto delegate = std::make_shared<FrameWriterInterfaceHelper>([decoder](const Frame::Ptr &frame) {
decoder->inputFrame(frame);
return decoder->inputFrame(frame);
});
videoTrack->addDelegate(delegate);
}
@ -106,7 +106,7 @@ int main(int argc, char *argv[]) {
audio_player->playPCM((const char *) (pcm->get()->data[0]), len);
});
auto audio_delegate = std::make_shared<FrameWriterInterfaceHelper>( [decoder](const Frame::Ptr &frame) {
decoder->inputFrame(frame);
return decoder->inputFrame(frame);
});
audioTrack->addDelegate(audio_delegate);
}

View File

@ -33,7 +33,7 @@ DevChannel::DevChannel(const string &vhost, const string &app, const string &str
DevChannel::~DevChannel() {}
void DevChannel::inputYUV(char* apcYuv[3], int aiYuvLen[3], uint32_t uiStamp) {
bool DevChannel::inputYUV(char* apcYuv[3], int aiYuvLen[3], uint32_t uiStamp) {
#ifdef ENABLE_X264
//TimeTicker1(50);
if (!_pH264Enc) {
@ -46,16 +46,20 @@ void DevChannel::inputYUV(char* apcYuv[3], int aiYuvLen[3], uint32_t uiStamp) {
if (_pH264Enc) {
H264Encoder::H264Frame *pOut;
int iFrames = _pH264Enc->inputData(apcYuv, aiYuvLen, uiStamp, &pOut);
bool ret = false;
for (int i = 0; i < iFrames; i++) {
inputH264((char *) pOut[i].pucData, pOut[i].iLength, uiStamp);
ret = inputH264((char *) pOut[i].pucData, pOut[i].iLength, uiStamp) ? true : ret;
}
return ret;
}
return false;
#else
WarnL << "h264编码未启用,该方法无效,编译时请打开ENABLE_X264选项";
return false;
#endif //ENABLE_X264
}
void DevChannel::inputPCM(char* pcData, int iDataLen, uint32_t uiStamp) {
bool DevChannel::inputPCM(char* pcData, int iDataLen, uint32_t uiStamp) {
#ifdef ENABLE_FAAC
if (!_pAacEnc) {
_pAacEnc.reset(new AACEncoder());
@ -68,15 +72,17 @@ void DevChannel::inputPCM(char* pcData, int iDataLen, uint32_t uiStamp) {
unsigned char *pucOut;
int iRet = _pAacEnc->inputData(pcData, iDataLen, &pucOut);
if (iRet > 7) {
inputAAC((char *) pucOut + 7, iRet - 7, uiStamp, (char *)pucOut);
return inputAAC((char *) pucOut + 7, iRet - 7, uiStamp, (char *)pucOut);
}
}
return false;
#else
WarnL << "aac编码未启用,该方法无效,编译时请打开ENABLE_FAAC选项";
return false;
#endif //ENABLE_FAAC
}
void DevChannel::inputH264(const char *data, int len, uint32_t dts, uint32_t pts) {
bool DevChannel::inputH264(const char *data, int len, uint32_t dts, uint32_t pts) {
if(dts == 0){
dts = (uint32_t)_aTicker[0].elapsedTime();
}
@ -92,10 +98,10 @@ void DevChannel::inputH264(const char *data, int len, uint32_t dts, uint32_t pts
frame->_pts = pts;
frame->_buffer.assign(data, len);
frame->_prefix_size = prefixSize(data,len);
inputFrame(frame);
return inputFrame(frame);
}
void DevChannel::inputH265(const char *data, int len, uint32_t dts, uint32_t pts) {
bool DevChannel::inputH265(const char *data, int len, uint32_t dts, uint32_t pts) {
if(dts == 0){
dts = (uint32_t)_aTicker[0].elapsedTime();
}
@ -111,7 +117,7 @@ void DevChannel::inputH265(const char *data, int len, uint32_t dts, uint32_t pts
frame->_pts = pts;
frame->_buffer.assign(data, len);
frame->_prefix_size = prefixSize(data,len);
inputFrame(frame);
return inputFrame(frame);
}
class FrameAutoDelete : public FrameFromPtr{
@ -128,52 +134,53 @@ public:
}
};
void DevChannel::inputAAC(const char *data_without_adts, int len, uint32_t dts, const char *adts_header){
bool DevChannel::inputAAC(const char *data_without_adts, int len, uint32_t dts, const char *adts_header){
if (dts == 0) {
dts = (uint32_t) _aTicker[1].elapsedTime();
}
if (adts_header) {
if (!adts_header) {
//No adts header
return inputFrame(std::make_shared<FrameFromPtr>(_audio->codecId, (char *) data_without_adts, len, dts, 0, 0));
}
if (adts_header + ADTS_HEADER_LEN == data_without_adts) {
//The adts header and the frame are contiguous
inputFrame(std::make_shared<FrameFromPtr>(_audio->codecId, (char *) data_without_adts - ADTS_HEADER_LEN, len + ADTS_HEADER_LEN, dts, 0, ADTS_HEADER_LEN));
} else {
return inputFrame(std::make_shared<FrameFromPtr>(_audio->codecId, (char *) data_without_adts - ADTS_HEADER_LEN, len + ADTS_HEADER_LEN, dts, 0, ADTS_HEADER_LEN));
}
//The adts header and the frame are not contiguous
char *data_with_adts = new char[len + ADTS_HEADER_LEN];
memcpy(data_with_adts, adts_header, ADTS_HEADER_LEN);
memcpy(data_with_adts + ADTS_HEADER_LEN, data_without_adts, len);
inputFrame(std::make_shared<FrameAutoDelete>(_audio->codecId, data_with_adts, len + ADTS_HEADER_LEN, dts, 0, ADTS_HEADER_LEN));
}
} else {
//No adts header
inputFrame(std::make_shared<FrameFromPtr>(_audio->codecId, (char *) data_without_adts, len, dts, 0, 0));
}
return inputFrame(std::make_shared<FrameAutoDelete>(_audio->codecId, data_with_adts, len + ADTS_HEADER_LEN, dts, 0, ADTS_HEADER_LEN));
}
void DevChannel::inputAudio(const char *data, int len, uint32_t dts){
bool DevChannel::inputAudio(const char *data, int len, uint32_t dts){
if (dts == 0) {
dts = (uint32_t) _aTicker[1].elapsedTime();
}
inputFrame(std::make_shared<FrameFromPtr>(_audio->codecId, (char *) data, len, dts, 0));
return inputFrame(std::make_shared<FrameFromPtr>(_audio->codecId, (char *) data, len, dts, 0));
}
void DevChannel::initVideo(const VideoInfo &info) {
bool DevChannel::initVideo(const VideoInfo &info) {
_video = std::make_shared<VideoInfo>(info);
switch (info.codecId){
case CodecH265 : addTrack(std::make_shared<H265Track>()); break;
case CodecH264 : addTrack(std::make_shared<H264Track>()); break;
default: WarnL << "不支持该类型的视频编码类型:" << info.codecId; break;
case CodecH265 : return addTrack(std::make_shared<H265Track>());
case CodecH264 : return addTrack(std::make_shared<H264Track>());
default: WarnL << "不支持该类型的视频编码类型:" << info.codecId; return false;
}
}
void DevChannel::initAudio(const AudioInfo &info) {
bool DevChannel::initAudio(const AudioInfo &info) {
_audio = std::make_shared<AudioInfo>(info);
switch (info.codecId) {
case CodecAAC : addTrack(std::make_shared<AACTrack>()); break;
case CodecAAC : return addTrack(std::make_shared<AACTrack>());
case CodecG711A :
case CodecG711U : addTrack(std::make_shared<G711Track>(info.codecId, info.iSampleRate, info.iChannel, info.iSampleBit)); break;
case CodecOpus : addTrack(std::make_shared<OpusTrack>()); break;
default: WarnL << "不支持该类型的音频编码类型:" << info.codecId; break;
case CodecG711U : return addTrack(std::make_shared<G711Track>(info.codecId, info.iSampleRate, info.iChannel, info.iSampleBit));
case CodecOpus : return addTrack(std::make_shared<OpusTrack>());
default: WarnL << "不支持该类型的音频编码类型:" << info.codecId; return false;
}
}

View File

@ -58,14 +58,14 @@ public:
* Initialize the video track; equivalent to MultiMediaSourceMuxer::addTrack(VideoTrack::Ptr)
* @param info video track parameters
*/
void initVideo(const VideoInfo &info);
bool initVideo(const VideoInfo &info);
/**
* Initialize the audio track;
* equivalent to MultiMediaSourceMuxer::addTrack(AudioTrack::Ptr)
* @param info audio track parameters
*/
void initAudio(const AudioInfo &info);
bool initAudio(const AudioInfo &info);
/**
* Input an H264 frame
@ -74,7 +74,7 @@ public:
* @param dts decode timestamp in milliseconds; if 0 it is generated internally
* @param pts presentation timestamp in milliseconds; if 0 it is set to dts
*/
void inputH264(const char *data, int len, uint32_t dts, uint32_t pts = 0);
bool inputH264(const char *data, int len, uint32_t dts, uint32_t pts = 0);
/**
* Input an H265 frame
@ -83,7 +83,7 @@ public:
* @param dts decode timestamp in milliseconds; if 0 it is generated internally
* @param pts presentation timestamp in milliseconds; if 0 it is set to dts
*/
void inputH265(const char *data, int len, uint32_t dts, uint32_t pts = 0);
bool inputH265(const char *data, int len, uint32_t dts, uint32_t pts = 0);
/**
* Input an aac frame
@ -92,7 +92,7 @@ public:
* @param dts decode timestamp in milliseconds
* @param adts_header adts header
*/
void inputAAC(const char *data_without_adts, int len, uint32_t dts, const char *adts_header);
bool inputAAC(const char *data_without_adts, int len, uint32_t dts, const char *adts_header);
/**
* Input an OPUS/G711 audio frame
@ -100,7 +100,7 @@ public:
* @param len number of bytes of data
* @param dts decode timestamp in milliseconds
*/
void inputAudio(const char *data, int len, uint32_t dts);
bool inputAudio(const char *data, int len, uint32_t dts);
/**
* Input a yuv420p video frame; internally encoded and forwarded to the inputH264 method
@ -108,8 +108,7 @@ public:
* @param aiYuvLen lengths of the three yuv planes
* @param uiStamp timestamp in milliseconds
*/
void inputYUV(char *apcYuv[3], int aiYuvLen[3], uint32_t uiStamp);
bool inputYUV(char *apcYuv[3], int aiYuvLen[3], uint32_t uiStamp);
/**
* Input PCM data; internally encoded and forwarded to the inputAAC method
@ -117,7 +116,7 @@ public:
* @param iDataLen number of bytes of PCM data
* @param uiStamp timestamp in milliseconds
*/
void inputPCM(char *pcData, int iDataLen, uint32_t uiStamp);
bool inputPCM(char *pcData, int iDataLen, uint32_t uiStamp);
private:
MediaOriginType getOriginType(MediaSource &sender) const override;
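Example (not part of this diff): a rough sketch of the bool-returning DevChannel API, assuming `dev` is an existing DevChannel::Ptr and `h264_ptr`/`h264_len`/`dts`/`pts` are illustrative values.
VideoInfo info;
info.codecId = CodecH264;
info.iWidth = 1920;
info.iHeight = 1080;
info.iFrameRate = 25;
if (!dev->initVideo(info)) {
    // unsupported video codec
}
// no audio source: fall back to the new mute AAC track
dev->addMuteAudioTrack();
dev->addTrackCompleted();
// the return value now indicates whether any consumer accepted the frame
bool consumed = dev->inputH264(h264_ptr, h264_len, dts, pts);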

View File

@ -9,6 +9,7 @@
*/
#include "MediaSink.h"
#include "Extension/AAC.h"
//Wait at most 10 seconds for uninitialized Tracks; after that timeout, uninitialized Tracks are ignored
static size_t constexpr kMaxWaitReadyMS= 10000;
@ -21,35 +22,45 @@ static size_t constexpr kMaxUnreadyFrame = 100;
namespace mediakit{
void MediaSink::addTrack(const Track::Ptr &track_in) {
bool MediaSink::addTrack(const Track::Ptr &track_in) {
GET_CONFIG(bool, enabel_audio, General::kEnableAudio);
if (!enabel_audio) {
//When audio is disabled, speed up registration of video-only streams
_max_track_size = 1;
if (track_in->getTrackType() == TrackAudio) {
//Audio is ignored globally
return;
return false;
}
} else {
//Whether to add a mute audio track
GET_CONFIG(bool, addMuteAudio, General::kAddMuteAudio);
if (addMuteAudio && track_in->getTrackType() == TrackVideo) {
addMuteAudioTrack();
}
}
lock_guard<recursive_mutex> lck(_mtx);
if (_all_track_ready) {
WarnL << "all track is ready, add this track too late!";
return;
return false;
}
//Clone the Track: only its data is copied, not its data-forwarding relationships
auto track = track_in->clone();
auto codec_id = track->getCodecId();
_track_map[codec_id] = track;
_track_ready_callback[codec_id] = [this, track]() {
auto track_type = track->getTrackType();
if (track_type == TrackAudio) {
//Ensure that adding a real (non-mute) audio track cancels any previously added mute audio track
_mute_audio_maker = nullptr;
}
_track_map[track_type] = track;
_track_ready_callback[track_type] = [this, track]() {
onTrackReady(track);
};
_ticker.resetTime();
track->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([this](const Frame::Ptr &frame) {
if (_all_track_ready) {
onTrackFrame(frame);
} else {
auto &frame_unread = _frame_unread[frame->getCodecId()];
return onTrackFrame(frame);
}
auto &frame_unread = _frame_unread[frame->getTrackType()];
if (frame_unread.size() > kMaxUnreadyFrame) {
//An unready track must not cache too many frames, otherwise memory may overflow
frame_unread.clear();
@ -57,8 +68,9 @@ void MediaSink::addTrack(const Track::Ptr &track_in) {
}
//Some Tracks are not ready yet; cache the frame for now
frame_unread.emplace_back(Frame::getCacheAbleFrame(frame));
}
return true;
}));
return true;
}
void MediaSink::resetTracks() {
@ -71,19 +83,24 @@ void MediaSink::resetTracks() {
_frame_unread.clear();
}
void MediaSink::inputFrame(const Frame::Ptr &frame) {
bool MediaSink::inputFrame(const Frame::Ptr &frame) {
lock_guard<recursive_mutex> lck(_mtx);
auto it = _track_map.find(frame->getCodecId());
auto it = _track_map.find(frame->getTrackType());
if (it == _track_map.end()) {
return;
return false;
}
auto ret = it->second->inputFrame(frame);
if (_mute_audio_maker && frame->getTrackType() == TrackVideo) {
//Video frames drive the generation of mute audio
_mute_audio_maker->inputFrame(frame);
}
it->second->inputFrame(frame);
checkTrackIfReady(nullptr);
return ret;
}
void MediaSink::checkTrackIfReady_l(const Track::Ptr &track){
//When a Track transitions from unready to ready, trigger the onTrackReady callback
auto it_callback = _track_ready_callback.find(track->getCodecId());
auto it_callback = _track_ready_callback.find(track->getTrackType());
if (it_callback != _track_ready_callback.end() && track->ready()) {
it_callback->second();
_track_ready_callback.erase(it_callback);
@ -184,5 +201,72 @@ vector<Track::Ptr> MediaSink::getTracks(bool trackReady) const{
return ret;
}
class FrameFromStaticPtr : public FrameFromPtr {
public:
template<typename ... ARGS>
FrameFromStaticPtr(ARGS &&...args) : FrameFromPtr(std::forward<ARGS>(args)...) {};
~FrameFromStaticPtr() override = default;
bool cacheAble() const override {
return true;
}
};
static uint8_t s_mute_adts[] = {0xff, 0xf1, 0x6c, 0x40, 0x2d, 0x3f, 0xfc, 0x00, 0xe0, 0x34, 0x20, 0xad, 0xf2, 0x3f, 0xb5, 0xdd,
0x73, 0xac, 0xbd, 0xca, 0xd7, 0x7d, 0x4a, 0x13, 0x2d, 0x2e, 0xa2, 0x62, 0x02, 0x70, 0x3c, 0x1c,
0xc5, 0x63, 0x55, 0x69, 0x94, 0xb5, 0x8d, 0x70, 0xd7, 0x24, 0x6a, 0x9e, 0x2e, 0x86, 0x24, 0xea,
0x4f, 0xd4, 0xf8, 0x10, 0x53, 0xa5, 0x4a, 0xb2, 0x9a, 0xf0, 0xa1, 0x4f, 0x2f, 0x66, 0xf9, 0xd3,
0x8c, 0xa6, 0x97, 0xd5, 0x84, 0xac, 0x09, 0x25, 0x98, 0x0b, 0x1d, 0x77, 0x04, 0xb8, 0x55, 0x49,
0x85, 0x27, 0x06, 0x23, 0x58, 0xcb, 0x22, 0xc3, 0x20, 0x3a, 0x12, 0x09, 0x48, 0x24, 0x86, 0x76,
0x95, 0xe3, 0x45, 0x61, 0x43, 0x06, 0x6b, 0x4a, 0x61, 0x14, 0x24, 0xa9, 0x16, 0xe0, 0x97, 0x34,
0xb6, 0x58, 0xa4, 0x38, 0x34, 0x90, 0x19, 0x5d, 0x00, 0x19, 0x4a, 0xc2, 0x80, 0x4b, 0xdc, 0xb7,
0x00, 0x18, 0x12, 0x3d, 0xd9, 0x93, 0xee, 0x74, 0x13, 0x95, 0xad, 0x0b, 0x59, 0x51, 0x0e, 0x99,
0xdf, 0x49, 0x98, 0xde, 0xa9, 0x48, 0x4b, 0xa5, 0xfb, 0xe8, 0x79, 0xc9, 0xe2, 0xd9, 0x60, 0xa5,
0xbe, 0x74, 0xa6, 0x6b, 0x72, 0x0e, 0xe3, 0x7b, 0x28, 0xb3, 0x0e, 0x52, 0xcc, 0xf6, 0x3d, 0x39,
0xb7, 0x7e, 0xbb, 0xf0, 0xc8, 0xce, 0x5c, 0x72, 0xb2, 0x89, 0x60, 0x33, 0x7b, 0xc5, 0xda, 0x49,
0x1a, 0xda, 0x33, 0xba, 0x97, 0x9e, 0xa8, 0x1b, 0x6d, 0x5a, 0x77, 0xb6, 0xf1, 0x69, 0x5a, 0xd1,
0xbd, 0x84, 0xd5, 0x4e, 0x58, 0xa8, 0x5e, 0x8a, 0xa0, 0xc2, 0xc9, 0x22, 0xd9, 0xa5, 0x53, 0x11,
0x18, 0xc8, 0x3a, 0x39, 0xcf, 0x3f, 0x57, 0xb6, 0x45, 0x19, 0x1e, 0x8a, 0x71, 0xa4, 0x46, 0x27,
0x9e, 0xe9, 0xa4, 0x86, 0xdd, 0x14, 0xd9, 0x4d, 0xe3, 0x71, 0xe3, 0x26, 0xda, 0xaa, 0x17, 0xb4,
0xac, 0xe1, 0x09, 0xc1, 0x0d, 0x75, 0xba, 0x53, 0x0a, 0x37, 0x8b, 0xac, 0x37, 0x39, 0x41, 0x27,
0x6a, 0xf0, 0xe9, 0xb4, 0xc2, 0xac, 0xb0, 0x39, 0x73, 0x17, 0x64, 0x95, 0xf4, 0xdc, 0x33, 0xbb,
0x84, 0x94, 0x3e, 0xf8, 0x65, 0x71, 0x60, 0x7b, 0xd4, 0x5f, 0x27, 0x79, 0x95, 0x6a, 0xba, 0x76,
0xa6, 0xa5, 0x9a, 0xec, 0xae, 0x55, 0x3a, 0x27, 0x48, 0x23, 0xcf, 0x5c, 0x4d, 0xbc, 0x0b, 0x35,
0x5c, 0xa7, 0x17, 0xcf, 0x34, 0x57, 0xc9, 0x58, 0xc5, 0x20, 0x09, 0xee, 0xa5, 0xf2, 0x9c, 0x6c,
0x39, 0x1a, 0x77, 0x92, 0x9b, 0xff, 0xc6, 0xae, 0xf8, 0x36, 0xba, 0xa8, 0xaa, 0x6b, 0x1e, 0x8c,
0xc5, 0x97, 0x39, 0x6a, 0xb8, 0xa2, 0x55, 0xa8, 0xf8};
#define MUTE_ADTS_DATA s_mute_adts
#define MUTE_ADTS_DATA_LEN sizeof(s_mute_adts)
#define MUTE_ADTS_DATA_MS 130
bool MuteAudioMaker::inputFrame(const Frame::Ptr &frame) {
if (frame->getTrackType() == TrackVideo) {
auto audio_idx = frame->dts() / MUTE_ADTS_DATA_MS;
if (_audio_idx != audio_idx) {
_audio_idx = audio_idx;
auto aacFrame = std::make_shared<FrameFromStaticPtr>(CodecAAC, (char *) MUTE_ADTS_DATA, MUTE_ADTS_DATA_LEN,
_audio_idx * MUTE_ADTS_DATA_MS, 0, ADTS_HEADER_LEN);
return FrameDispatcher::inputFrame(aacFrame);
}
}
return false;
}
bool MediaSink::addMuteAudioTrack() {
if (getTrack(TrackAudio, false)) {
WarnL << "audio track already existed";
return false;
}
if (addTrack(std::make_shared<AACTrack>())) {
_mute_audio_maker = std::make_shared<MuteAudioMaker>();
_mute_audio_maker->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([this](const Frame::Ptr &frame) {
return inputFrame(frame);
}));
return true;
}
return false;
}
}//namespace mediakit
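A note on the timing above: MUTE_ADTS_DATA holds one canned silent AAC ADTS frame that is spaced MUTE_ADTS_DATA_MS (130 ms) apart, so MuteAudioMaker only dispatches a frame when dts / 130 advances. Worked example with hypothetical 25 fps video timestamps:
// video dts (ms):  0, 40, 80, 120, 160, 200, 240, 260, ...
// dts / 130:       0,  0,  0,   0,   1,   1,   1,   2, ...
// _audio_idx starts at 0, so the first mute AAC frame is dispatched when the
// quotient first changes (dts >= 130), stamped at _audio_idx * 130 = 130 ms.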

View File

@ -32,7 +32,7 @@ public:
* Only information such as sps/pps is cloned; Delegate relationships are not
* @param track
*/
virtual void addTrack(const Track::Ptr & track) = 0;
virtual bool addTrack(const Track::Ptr & track) = 0;
/**
* All tracks have been added
@ -53,6 +53,20 @@ public:
~MediaSinkInterface() override = default;
};
/**
* AAC mute-audio generator
*/
class MuteAudioMaker : public FrameDispatcher {
public:
typedef std::shared_ptr<MuteAudioMaker> Ptr;
MuteAudioMaker() = default;
~MuteAudioMaker() override = default;
bool inputFrame(const Frame::Ptr &frame) override;
private:
uint32_t _audio_idx = 0;
};
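Sketch (assumptions: `video_track` is a Track::Ptr and `sink` a MediaSink-derived object) of how MuteAudioMaker can be composed manually, roughly what MediaSink::addMuteAudioTrack() plus the video-driven call in MediaSink::inputFrame() do internally:
auto mute_maker = std::make_shared<MuteAudioMaker>();
// video frames drive the generator
video_track->addDelegate(mute_maker);
// generated silent AAC frames are fed back into the sink
mute_maker->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([&](const Frame::Ptr &frame) {
    return sink->inputFrame(frame);
}));
// register a mute AAC track so the sink knows about the audio stream
sink->addTrack(std::make_shared<AACTrack>());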
/**
* This class waits until Track::ready() returns true (the track is ready) before notifying the derived class of the next step;
* the intent is that Frames are intercepted and processed by the Track first, so that sps/pps/aac_cfg can be obtained
@ -67,14 +81,14 @@ public:
* Input a frame
* @param frame
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
/**
* Add a track; internally the Track's clone method is called,
* so only information such as sps/pps is copied, not the Delegate relationships
* @param track
*/
void addTrack(const Track::Ptr & track) override;
bool addTrack(const Track::Ptr & track) override;
/**
* All Tracks have been added; calling this avoids the up-to-3-second wait for more Tracks (single-Track case) before onAllTrackReady is triggered
@ -94,13 +108,18 @@ public:
*/
vector<Track::Ptr> getTracks(bool trackReady = true) const override;
/**
* Add an AAC mute audio track
*/
bool addMuteAudioTrack();
protected:
/**
* A track is ready, i.e. its ready() method returns true,
* meaning the track now holds information such as sps/pps
* @param track
*/
virtual void onTrackReady(const Track::Ptr & track) {};
virtual bool onTrackReady(const Track::Ptr & track) { return false; };
/**
* All Tracks are ready
@ -111,7 +130,7 @@ protected:
* A Track produced a frame; this method is only called after onAllTrackReady has been triggered
* @param frame
*/
virtual void onTrackFrame(const Frame::Ptr &frame) {};
virtual bool onTrackFrame(const Frame::Ptr &frame) { return false; };
private:
/**
@ -133,6 +152,7 @@ private:
unordered_map<int,List<Frame::Ptr> > _frame_unread;
unordered_map<int,function<void()> > _track_ready_callback;
Ticker _ticker;
MuteAudioMaker::Ptr _mute_audio_maker;
};

View File

@ -244,36 +244,38 @@ vector<Track::Ptr> MultiMediaSourceMuxer::getMediaTracks(MediaSource &sender, bo
return getTracks(trackReady);
}
void MultiMediaSourceMuxer::onTrackReady(const Track::Ptr &track) {
bool MultiMediaSourceMuxer::onTrackReady(const Track::Ptr &track) {
if (CodecL16 == track->getCodecId()) {
WarnL << "L16音频格式目前只支持RTSP协议推流拉流!!!";
return;
return false;
}
bool ret = false;
if (_rtmp) {
_rtmp->addTrack(track);
ret = _rtmp->addTrack(track) ? true : ret;
}
if (_rtsp) {
_rtsp->addTrack(track);
ret = _rtsp->addTrack(track) ? true : ret;
}
if (_ts) {
_ts->addTrack(track);
ret = _ts->addTrack(track) ? true : ret;
}
#if defined(ENABLE_MP4)
if (_fmp4) {
_fmp4->addTrack(track);
ret = _fmp4->addTrack(track) ? true : ret;
}
#endif
//Copy the smart pointers to prevent data races caused by recording-related APIs being set from other threads
auto hls = _hls;
if (hls) {
hls->addTrack(track);
ret = hls->addTrack(track) ? true : ret;
}
auto mp4 = _mp4;
if (mp4) {
mp4->addTrack(track);
ret = mp4->addTrack(track) ? true : ret;
}
return ret;
}
void MultiMediaSourceMuxer::onAllTrackReady() {
@ -386,7 +388,7 @@ private:
Frame::Ptr _frame;
};
void MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame_in) {
bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame_in) {
GET_CONFIG(bool, modify_stamp, General::kModifyStamp);
auto frame = frame_in;
if (modify_stamp) {
@ -394,39 +396,41 @@ void MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame_in) {
frame = std::make_shared<FrameModifyStamp>(frame, _stamp[frame->getTrackType()]);
}
bool ret = false;
if (_rtmp) {
_rtmp->inputFrame(frame);
ret = _rtmp->inputFrame(frame) ? true : ret;
}
if (_rtsp) {
_rtsp->inputFrame(frame);
ret = _rtsp->inputFrame(frame) ? true : ret;
}
if (_ts) {
_ts->inputFrame(frame);
ret = _ts->inputFrame(frame) ? true : ret;
}
//Copy the smart pointers to prevent data races caused by recording-related APIs being set from other threads
//A smart-pointer copy is used here for thread safety; it performs better than a mutex
auto hls = _hls;
if (hls) {
hls->inputFrame(frame);
ret = hls->inputFrame(frame) ? true : ret;
}
auto mp4 = _mp4;
if (mp4) {
mp4->inputFrame(frame);
ret = mp4->inputFrame(frame) ? true : ret;
}
#if defined(ENABLE_MP4)
if (_fmp4) {
_fmp4->inputFrame(frame);
ret = _fmp4->inputFrame(frame) ? true : ret;
}
#endif
#if defined(ENABLE_RTPPROXY)
lock_guard<mutex> lck(_rtp_sender_mtx);
for (auto &pr : _rtp_sender) {
pr.second->inputFrame(frame);
ret = pr.second->inputFrame(frame) ? true : ret;
}
#endif //ENABLE_RTPPROXY
return ret;
}
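The result folding above deliberately uses `ret = x->inputFrame(frame) ? true : ret;` instead of `ret || x->inputFrame(frame)`, which would short-circuit and starve the remaining consumers once one of them succeeds. Condensed form of the idiom (hypothetical `consumers` container):
bool ret = false;
for (auto &consumer : consumers) {
    // every consumer still receives the frame; ret records whether at least one accepted it
    ret = consumer->inputFrame(frame) ? true : ret;
}
return ret;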
bool MultiMediaSourceMuxer::isEnabled(){

View File

@ -127,7 +127,7 @@ protected:
* i.e. the track now holds information such as sps/pps
* @param track
*/
void onTrackReady(const Track::Ptr & track) override;
bool onTrackReady(const Track::Ptr & track) override;
/**
* All Tracks are ready
@ -138,7 +138,7 @@ protected:
* A Track produced a frame; this method is only called after onAllTrackReady has been triggered
* @param frame
*/
void onTrackFrame(const Frame::Ptr &frame) override;
bool onTrackFrame(const Frame::Ptr &frame) override;
private:
bool _is_enable = false;

View File

@ -263,8 +263,12 @@ int AACTrack::getAudioChannel() const {
return _channel;
}
void AACTrack::inputFrame(const Frame::Ptr &frame) {
if (frame->prefixSize()) {
bool AACTrack::inputFrame(const Frame::Ptr &frame) {
if (!frame->prefixSize()) {
return inputFrame_l(frame);
}
bool ret = false;
//Has an adts header; try to split it into individual frames
auto ptr = frame->data();
auto end = frame->data() + frame->size();
@ -276,14 +280,14 @@ void AACTrack::inputFrame(const Frame::Ptr &frame) {
auto sub_frame = std::make_shared<FrameInternal<FrameFromPtr> >(frame, (char *) ptr, frame_len, ADTS_HEADER_LEN);
ptr += frame_len;
sub_frame->setCodecId(CodecAAC);
inputFrame_l(sub_frame);
if (inputFrame_l(sub_frame)) {
ret = true;
}
} else {
inputFrame_l(frame);
}
return ret;
}
void AACTrack::inputFrame_l(const Frame::Ptr &frame) {
bool AACTrack::inputFrame_l(const Frame::Ptr &frame) {
if (_cfg.empty()) {
//aac_cfg information not obtained yet
if (frame->prefixSize()) {
@ -297,8 +301,9 @@ void AACTrack::inputFrame_l(const Frame::Ptr &frame) {
if (frame->size() > frame->prefixSize()) {
//There is an actual payload in addition to the adts header
AudioTrack::inputFrame(frame);
return AudioTrack::inputFrame(frame);
}
return false;
}
void AACTrack::onReady() {

View File

@ -51,13 +51,13 @@ public:
int getAudioChannel() const override;
int getAudioSampleRate() const override;
int getAudioSampleBit() const override;
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
private:
void onReady();
Sdp::Ptr getSdp() override;
Track::Ptr clone() override;
void inputFrame_l(const Frame::Ptr &frame);
bool inputFrame_l(const Frame::Ptr &frame);
private:
string _cfg;

View File

@ -85,7 +85,7 @@ void AACRtmpEncoder::makeConfigPacket() {
}
}
void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
bool AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
if (_aac_cfg.empty()) {
if (frame->prefixSize()) {
//Contains an adts header; obtain the aac configuration from it
@ -94,7 +94,10 @@ void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
makeConfigPacket();
}
if(!_aac_cfg.empty()){
if(_aac_cfg.empty()){
return false;
}
auto rtmpPkt = RtmpPacket::create();
//header
uint8_t is_config = false;
@ -110,7 +113,7 @@ void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
rtmpPkt->time_stamp = frame->dts();
rtmpPkt->type_id = MSG_AUDIO;
RtmpCodec::inputRtmp(rtmpPkt);
}
return true;
}
void AACRtmpEncoder::makeAudioConfigPkt() {

View File

@ -64,7 +64,7 @@ public:
* Input an aac frame (the adts header, if present, is used to extract the aac config)
* @param frame aac data
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
/**
* Generate the config packet

View File

@ -24,7 +24,7 @@ AACRtpEncoder::AACRtpEncoder(uint32_t ui32Ssrc,
ui8Interleaved){
}
void AACRtpEncoder::inputFrame(const Frame::Ptr &frame) {
bool AACRtpEncoder::inputFrame(const Frame::Ptr &frame) {
auto stamp = frame->dts();
auto data = frame->data() + frame->prefixSize();
auto len = frame->size() - frame->prefixSize();
@ -50,6 +50,7 @@ void AACRtpEncoder::inputFrame(const Frame::Ptr &frame) {
ptr += max_size;
remain_size -= max_size;
}
return len > 0;
}
void AACRtpEncoder::makeAACRtp(const void *data, size_t len, bool mark, uint32_t uiStamp) {

View File

@ -74,7 +74,7 @@ public:
* Input aac data with an adts header
* @param frame aac data with an adts header
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
private:
void makeAACRtp(const void *pData, size_t uiLen, bool bMark, uint32_t uiStamp);

View File

@ -42,9 +42,9 @@ CommonRtmpEncoder::CommonRtmpEncoder(const Track::Ptr &track) : CommonRtmpDecode
_audio_flv_flags = getAudioRtmpFlags(track);
}
void CommonRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
bool CommonRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
if (!_audio_flv_flags) {
return;
return false;
}
auto rtmp = RtmpPacket::create();
//header
@ -57,6 +57,7 @@ void CommonRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
rtmp->time_stamp = frame->dts();
rtmp->type_id = MSG_AUDIO;
RtmpCodec::inputRtmp(rtmp);
return true;
}
}//namespace mediakit

View File

@ -63,7 +63,7 @@ public:
/**
*
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
private:
uint8_t _audio_flv_flags = 0;

View File

@ -68,7 +68,7 @@ CommonRtpEncoder::CommonRtpEncoder(CodecId codec, uint32_t ssrc, uint32_t mtu_si
: CommonRtpDecoder(codec), RtpInfo(ssrc, mtu_size, sample_rate, payload_type, interleaved) {
}
void CommonRtpEncoder::inputFrame(const Frame::Ptr &frame){
bool CommonRtpEncoder::inputFrame(const Frame::Ptr &frame){
auto stamp = frame->pts();
auto ptr = frame->data() + frame->prefixSize();
auto len = frame->size() - frame->prefixSize();
@ -88,4 +88,5 @@ void CommonRtpEncoder::inputFrame(const Frame::Ptr &frame){
ptr += rtp_size;
remain_size -= rtp_size;
}
return len > 0;
}

View File

@ -78,7 +78,7 @@ public:
/**
* Input a frame; it will be packed into rtp
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
};
}//namespace mediakit

View File

@ -213,7 +213,7 @@ void FrameMerger::doMerge(BufferLikeString &merged, const Frame::Ptr &frame) con
}
}
void FrameMerger::inputFrame(const Frame::Ptr &frame, const onOutput &cb, BufferLikeString *buffer) {
bool FrameMerger::inputFrame(const Frame::Ptr &frame, const onOutput &cb, BufferLikeString *buffer) {
if (willFlush(frame)) {
Frame::Ptr back = _frame_cache.back();
Buffer::Ptr merged_frame = back;
@ -246,7 +246,7 @@ void FrameMerger::inputFrame(const Frame::Ptr &frame, const onOutput &cb, Buffer
case mp4_nal_size: {
if (frame->dropAble()) {
//In h264-prefix and mp4-nal-size modes, filter out invalid frames
return;
return false;
}
break;
}
@ -257,6 +257,7 @@ void FrameMerger::inputFrame(const Frame::Ptr &frame, const onOutput &cb, Buffer
_have_decode_able_frame = true;
}
_frame_cache.emplace_back(Frame::getCacheAbleFrame(frame));
return true;
}
FrameMerger::FrameMerger(int type) {

View File

@ -277,7 +277,7 @@ public:
/**
*
*/
virtual void inputFrame(const Frame::Ptr &frame) = 0;
virtual bool inputFrame(const Frame::Ptr &frame) = 0;
};
/**
@ -286,7 +286,7 @@ public:
class FrameWriterInterfaceHelper : public FrameWriterInterface {
public:
typedef std::shared_ptr<FrameWriterInterfaceHelper> Ptr;
typedef std::function<void(const Frame::Ptr &frame)> onWriteFrame;
typedef std::function<bool(const Frame::Ptr &frame)> onWriteFrame;
/**
* The onWriteFrame callback is triggered after inputFrame is called
@ -300,9 +300,10 @@ public:
/**
*
*/
void inputFrame(const Frame::Ptr &frame) override {
_writeCallback(frame);
bool inputFrame(const Frame::Ptr &frame) override {
return _writeCallback(frame);
}
private:
onWriteFrame _writeCallback;
};
@ -340,7 +341,7 @@ public:
/**
*
*/
void inputFrame(const Frame::Ptr &frame) override{
bool inputFrame(const Frame::Ptr &frame) override{
if(_need_update){
//The delegate list has changed; synchronize it here
lock_guard<mutex> lck(_mtx);
@ -349,10 +350,14 @@ public:
}
//_delegates_read is guaranteed to be operated on by a single thread
bool ret = false;
for (auto &pr : _delegates_read) {
pr.second->inputFrame(frame);
if (pr.second->inputFrame(frame)) {
ret = true;
}
}
return ret;
}
/**
*
@ -503,7 +508,7 @@ public:
~FrameMerger() = default;
void clear();
void inputFrame(const Frame::Ptr &frame, const onOutput &cb, BufferLikeString *buffer = nullptr);
bool inputFrame(const Frame::Ptr &frame, const onOutput &cb, BufferLikeString *buffer = nullptr);
private:
bool willFlush(const Frame::Ptr &frame) const;
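Example (not from this commit, and assuming mp4_nal_size is one of FrameMerger's public merge modes, as used by the muxers above, and `frame` is an existing Frame::Ptr): callers can now forward the merger's bool result to report whether the frame was cached or dropped.
FrameMerger merger(FrameMerger::mp4_nal_size);
bool consumed = merger.inputFrame(frame, [](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer, bool have_idr) {
    // handle the merged access unit, e.g. hand it to a muxer
});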

View File

@ -144,18 +144,22 @@ bool H264Track::ready() {
return !_sps.empty() && !_pps.empty();
}
void H264Track::inputFrame(const Frame::Ptr &frame) {
bool H264Track::inputFrame(const Frame::Ptr &frame) {
using H264FrameInternal = FrameInternal<H264FrameNoCacheAble>;
int type = H264_TYPE(frame->data()[frame->prefixSize()]);
if (type != H264Frame::NAL_B_P && type != H264Frame::NAL_IDR) {
if (type == H264Frame::NAL_B_P || type == H264Frame::NAL_IDR) {
return inputFrame_l(frame);
}
//For non-I/B/P frames, split the data in case multiple NAL units are stuck together
bool ret = false;
splitH264(frame->data(), frame->size(), frame->prefixSize(), [&](const char *ptr, size_t len, size_t prefix) {
H264FrameInternal::Ptr sub_frame = std::make_shared<H264FrameInternal>(frame, (char *) ptr, len, prefix);
inputFrame_l(sub_frame);
});
} else {
inputFrame_l(frame);
if (inputFrame_l(sub_frame)) {
ret = true;
}
});
return ret;
}
void H264Track::onReady(){
@ -169,8 +173,9 @@ Track::Ptr H264Track::clone() {
return std::make_shared<std::remove_reference<decltype(*this)>::type >(*this);
}
void H264Track::inputFrame_l(const Frame::Ptr &frame){
bool H264Track::inputFrame_l(const Frame::Ptr &frame){
int type = H264_TYPE( frame->data()[frame->prefixSize()]);
bool ret = true;
switch (type) {
case H264Frame::NAL_SPS: {
_sps = string(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize());
@ -182,6 +187,7 @@ void H264Track::inputFrame_l(const Frame::Ptr &frame){
}
case H264Frame::NAL_AUD: {
//Ignore AUD frames;
ret = false;
break;
}
@ -189,7 +195,7 @@ void H264Track::inputFrame_l(const Frame::Ptr &frame){
if (frame->keyFrame()) {
insertConfigFrame(frame);
}
VideoTrack::inputFrame(frame);
ret = VideoTrack::inputFrame(frame);
break;
}
@ -197,6 +203,7 @@ void H264Track::inputFrame_l(const Frame::Ptr &frame){
if (_width == 0 && ready()) {
onReady();
}
return ret;
}
void H264Track::insertConfigFrame(const Frame::Ptr &frame){

View File

@ -128,13 +128,13 @@ public:
int getVideoHeight() const override;
int getVideoWidth() const override;
float getVideoFps() const override;
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
private:
void onReady();
Sdp::Ptr getSdp() override;
Track::Ptr clone() override;
void inputFrame_l(const Frame::Ptr &frame);
bool inputFrame_l(const Frame::Ptr &frame);
void insertConfigFrame(const Frame::Ptr &frame);
private:

View File

@ -155,7 +155,7 @@ void H264RtmpEncoder::makeConfigPacket(){
}
}
void H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
bool H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
auto data = frame->data() + frame->prefixSize();
auto len = frame->size() - frame->prefixSize();
auto type = H264_TYPE(data[0]);
@ -183,7 +183,7 @@ void H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
_rtmp_packet->buffer.resize(5);
}
_merger.inputFrame(frame, [this](uint32_t dts, uint32_t pts, const Buffer::Ptr &, bool have_key_frame) {
return _merger.inputFrame(frame, [this](uint32_t dts, uint32_t pts, const Buffer::Ptr &, bool have_key_frame) {
//flags
_rtmp_packet->buffer[0] = FLV_CODEC_H264 | ((have_key_frame ? FLV_KEY_FRAME : FLV_INTER_FRAME) << 4);
//not config

View File

@ -69,7 +69,7 @@ public:
* Input an H264 frame (sps/pps may be omitted)
* @param frame
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
/**
* Generate the config packet

View File

@ -265,20 +265,20 @@ void H264RtpEncoder::packRtpStapA(const char *ptr, size_t len, uint32_t pts, boo
RtpCodec::inputRtp(rtp, gop_pos);
}
void H264RtpEncoder::inputFrame(const Frame::Ptr &frame) {
bool H264RtpEncoder::inputFrame(const Frame::Ptr &frame) {
auto ptr = frame->data() + frame->prefixSize();
switch (H264_TYPE(ptr[0])) {
case H264Frame::NAL_AUD:
case H264Frame::NAL_SEI : {
return;
return false;
}
case H264Frame::NAL_SPS: {
_sps = Frame::getCacheAbleFrame(frame);
return;
return true;
}
case H264Frame::NAL_PPS: {
_pps = Frame::getCacheAbleFrame(frame);
return;
return true;
}
default: break;
}
@ -288,14 +288,16 @@ void H264RtpEncoder::inputFrame(const Frame::Ptr &frame) {
inputFrame_l(_last_frame, _last_frame->pts() != frame->pts());
}
_last_frame = Frame::getCacheAbleFrame(frame);
return true;
}
void H264RtpEncoder::inputFrame_l(const Frame::Ptr &frame, bool is_mark){
bool H264RtpEncoder::inputFrame_l(const Frame::Ptr &frame, bool is_mark){
if (frame->keyFrame()) {
//Ensure every key frame is preceded by SPS and PPS
insertConfigFrame(frame->pts());
}
packRtp(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize(), frame->pts(), is_mark, false);
return true;
}
}//namespace mediakit

View File

@ -84,11 +84,11 @@ public:
* Input an H264 frame
* @param frame
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
private:
void insertConfigFrame(uint32_t pts);
void inputFrame_l(const Frame::Ptr &frame, bool is_mark);
bool inputFrame_l(const Frame::Ptr &frame, bool is_mark);
void packRtp(const char *data, size_t len, uint32_t pts, bool is_mark, bool gop_pos);
void packRtpFu(const char *data, size_t len, uint32_t pts, bool is_mark, bool gop_pos);
void packRtpStapA(const char *data, size_t len, uint32_t pts, bool is_mark, bool gop_pos);

View File

@ -89,28 +89,32 @@ bool H265Track::ready() {
return !_vps.empty() && !_sps.empty() && !_pps.empty();
}
void H265Track::inputFrame(const Frame::Ptr &frame) {
using H265FrameInternal = FrameInternal<H265FrameNoCacheAble>;
bool H265Track::inputFrame(const Frame::Ptr &frame) {
int type = H265_TYPE(frame->data()[frame->prefixSize()]);
if (frame->configFrame() || type == H265Frame::NAL_SEI_PREFIX) {
splitH264(frame->data(), frame->size(), frame->prefixSize(), [&](const char *ptr, size_t len, size_t prefix) {
H265FrameInternal::Ptr sub_frame = std::make_shared<H265FrameInternal>(frame, (char *) ptr, len, prefix);
inputFrame_l(sub_frame);
});
} else {
inputFrame_l(frame);
if (!frame->configFrame() && type != H265Frame::NAL_SEI_PREFIX) {
return inputFrame_l(frame);
}
bool ret = false;
splitH264(frame->data(), frame->size(), frame->prefixSize(), [&](const char *ptr, size_t len, size_t prefix) {
using H265FrameInternal = FrameInternal<H265FrameNoCacheAble>;
H265FrameInternal::Ptr sub_frame = std::make_shared<H265FrameInternal>(frame, (char *) ptr, len, prefix);
if (inputFrame_l(sub_frame)) {
ret = true;
}
});
return ret;
}
void H265Track::inputFrame_l(const Frame::Ptr &frame) {
bool H265Track::inputFrame_l(const Frame::Ptr &frame) {
if (frame->keyFrame()) {
insertConfigFrame(frame);
VideoTrack::inputFrame(frame);
_is_idr = true;
return;
return VideoTrack::inputFrame(frame);
}
_is_idr = false;
bool ret = true;
//Non-IDR frame
switch (H265_TYPE( frame->data()[frame->prefixSize()])) {
case H265Frame::NAL_VPS: {
@ -126,13 +130,14 @@ void H265Track::inputFrame_l(const Frame::Ptr &frame) {
break;
}
default: {
VideoTrack::inputFrame(frame);
ret = VideoTrack::inputFrame(frame);
break;
}
}
if (_width == 0 && ready()) {
onReady();
}
return ret;
}
void H265Track::onReady() {

View File

@ -150,13 +150,13 @@ public:
int getVideoWidth() const override;
int getVideoHeight() const override;
float getVideoFps() const override;
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
private:
void onReady();
Sdp::Ptr getSdp() override;
Track::Ptr clone() override;
void inputFrame_l(const Frame::Ptr &frame);
bool inputFrame_l(const Frame::Ptr &frame);
void insertConfigFrame(const Frame::Ptr &frame);
private:

View File

@ -134,7 +134,7 @@ void H265RtmpEncoder::makeConfigPacket(){
}
}
void H265RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
bool H265RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
auto data = frame->data() + frame->prefixSize();
auto len = frame->size() - frame->prefixSize();
auto type = H265_TYPE(data[0]);
@ -169,7 +169,7 @@ void H265RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
_rtmp_packet->buffer.resize(5);
}
_merger.inputFrame(frame, [this](uint32_t dts, uint32_t pts, const Buffer::Ptr &, bool have_key_frame) {
return _merger.inputFrame(frame, [this](uint32_t dts, uint32_t pts, const Buffer::Ptr &, bool have_key_frame) {
//flags
_rtmp_packet->buffer[0] = FLV_CODEC_H265 | ((have_key_frame ? FLV_KEY_FRAME : FLV_INTER_FRAME) << 4);
//not config

View File

@ -67,7 +67,7 @@ public:
* Input an H265 frame (sps/pps may be omitted)
* @param frame
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
/**
* Generate the config packet

View File

@ -254,7 +254,7 @@ H265RtpEncoder::H265RtpEncoder(uint32_t ui32Ssrc,
ui8Interleaved) {
}
void H265RtpEncoder::inputFrame(const Frame::Ptr &frame) {
bool H265RtpEncoder::inputFrame(const Frame::Ptr &frame) {
auto ptr = (uint8_t *) frame->data() + frame->prefixSize();
auto len = frame->size() - frame->prefixSize();
auto pts = frame->pts();
@ -305,6 +305,7 @@ void H265RtpEncoder::inputFrame(const Frame::Ptr &frame) {
} else {
RtpCodec::inputRtp(makeRtp(getTrackType(), ptr, len, false, pts), frame->keyFrame());
}
return len > 0;
}
}//namespace mediakit

View File

@ -86,7 +86,7 @@ public:
* Input an H265 frame
* @param frame
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
};
}//namespace mediakit{

View File

@ -49,15 +49,16 @@ public:
MediaSourceEventInterceptor::onReaderChanged(sender, size);
}
void inputFrame(const Frame::Ptr &frame) override {
bool inputFrame(const Frame::Ptr &frame) override {
GET_CONFIG(bool, fmp4_demand, General::kFMP4Demand);
if (_clear_cache && fmp4_demand) {
_clear_cache = false;
_media_src->clearCache();
}
if (_enabled || !fmp4_demand) {
MP4MuxerMemory::inputFrame(frame);
return MP4MuxerMemory::inputFrame(frame);
}
return false;
}
bool isEnabled() {

View File

@ -296,7 +296,7 @@ vector<Track::Ptr> HlsPlayerImp::getTracks(bool trackReady) const {
return MediaSink::getTracks(trackReady);
}
void HlsPlayerImp::inputFrame(const Frame::Ptr &frame) {
bool HlsPlayerImp::inputFrame(const Frame::Ptr &frame) {
//Compute the relative timestamp
int64_t dts, pts;
_stamp[frame->getTrackType()].revise(frame->dts(), frame->pts(), dts, pts);
@ -312,6 +312,7 @@ void HlsPlayerImp::inputFrame(const Frame::Ptr &frame) {
//Then play the earliest frame in the cache
setPlayPosition(_frame_cache.begin()->first);
}
return true;
}
int64_t HlsPlayerImp::getPlayPosition(){

View File

@ -135,7 +135,7 @@ private:
void onAllTrackReady() override;
void onPlayResult(const SockException &ex) override;
vector<Track::Ptr> getTracks(bool trackReady = true) const override;
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
void onShutdown(const SockException &ex) override;
void onTick();

View File

@ -106,10 +106,8 @@ float Demuxer::getDuration() const {
return _fDuration;
}
void Demuxer::addTrack(const Track::Ptr &track){
if(_listener){
_listener->addTrack(track);
}
bool Demuxer::addTrack(const Track::Ptr &track){
return _listener ? _listener->addTrack(track) : false;
}
void Demuxer::addTrackCompleted(){

View File

@ -299,7 +299,7 @@ public:
void setTrackListener(TrackListener *listener);
protected:
void addTrack(const Track::Ptr &track) override;
bool addTrack(const Track::Ptr &track) override;
void addTrackCompleted() override;
void resetTracks() override;

View File

@ -19,33 +19,6 @@ using namespace toolkit;
namespace mediakit {
static uint8_t s_mute_adts[] = {0xff, 0xf1, 0x6c, 0x40, 0x2d, 0x3f, 0xfc, 0x00, 0xe0, 0x34, 0x20, 0xad, 0xf2, 0x3f, 0xb5, 0xdd,
0x73, 0xac, 0xbd, 0xca, 0xd7, 0x7d, 0x4a, 0x13, 0x2d, 0x2e, 0xa2, 0x62, 0x02, 0x70, 0x3c, 0x1c,
0xc5, 0x63, 0x55, 0x69, 0x94, 0xb5, 0x8d, 0x70, 0xd7, 0x24, 0x6a, 0x9e, 0x2e, 0x86, 0x24, 0xea,
0x4f, 0xd4, 0xf8, 0x10, 0x53, 0xa5, 0x4a, 0xb2, 0x9a, 0xf0, 0xa1, 0x4f, 0x2f, 0x66, 0xf9, 0xd3,
0x8c, 0xa6, 0x97, 0xd5, 0x84, 0xac, 0x09, 0x25, 0x98, 0x0b, 0x1d, 0x77, 0x04, 0xb8, 0x55, 0x49,
0x85, 0x27, 0x06, 0x23, 0x58, 0xcb, 0x22, 0xc3, 0x20, 0x3a, 0x12, 0x09, 0x48, 0x24, 0x86, 0x76,
0x95, 0xe3, 0x45, 0x61, 0x43, 0x06, 0x6b, 0x4a, 0x61, 0x14, 0x24, 0xa9, 0x16, 0xe0, 0x97, 0x34,
0xb6, 0x58, 0xa4, 0x38, 0x34, 0x90, 0x19, 0x5d, 0x00, 0x19, 0x4a, 0xc2, 0x80, 0x4b, 0xdc, 0xb7,
0x00, 0x18, 0x12, 0x3d, 0xd9, 0x93, 0xee, 0x74, 0x13, 0x95, 0xad, 0x0b, 0x59, 0x51, 0x0e, 0x99,
0xdf, 0x49, 0x98, 0xde, 0xa9, 0x48, 0x4b, 0xa5, 0xfb, 0xe8, 0x79, 0xc9, 0xe2, 0xd9, 0x60, 0xa5,
0xbe, 0x74, 0xa6, 0x6b, 0x72, 0x0e, 0xe3, 0x7b, 0x28, 0xb3, 0x0e, 0x52, 0xcc, 0xf6, 0x3d, 0x39,
0xb7, 0x7e, 0xbb, 0xf0, 0xc8, 0xce, 0x5c, 0x72, 0xb2, 0x89, 0x60, 0x33, 0x7b, 0xc5, 0xda, 0x49,
0x1a, 0xda, 0x33, 0xba, 0x97, 0x9e, 0xa8, 0x1b, 0x6d, 0x5a, 0x77, 0xb6, 0xf1, 0x69, 0x5a, 0xd1,
0xbd, 0x84, 0xd5, 0x4e, 0x58, 0xa8, 0x5e, 0x8a, 0xa0, 0xc2, 0xc9, 0x22, 0xd9, 0xa5, 0x53, 0x11,
0x18, 0xc8, 0x3a, 0x39, 0xcf, 0x3f, 0x57, 0xb6, 0x45, 0x19, 0x1e, 0x8a, 0x71, 0xa4, 0x46, 0x27,
0x9e, 0xe9, 0xa4, 0x86, 0xdd, 0x14, 0xd9, 0x4d, 0xe3, 0x71, 0xe3, 0x26, 0xda, 0xaa, 0x17, 0xb4,
0xac, 0xe1, 0x09, 0xc1, 0x0d, 0x75, 0xba, 0x53, 0x0a, 0x37, 0x8b, 0xac, 0x37, 0x39, 0x41, 0x27,
0x6a, 0xf0, 0xe9, 0xb4, 0xc2, 0xac, 0xb0, 0x39, 0x73, 0x17, 0x64, 0x95, 0xf4, 0xdc, 0x33, 0xbb,
0x84, 0x94, 0x3e, 0xf8, 0x65, 0x71, 0x60, 0x7b, 0xd4, 0x5f, 0x27, 0x79, 0x95, 0x6a, 0xba, 0x76,
0xa6, 0xa5, 0x9a, 0xec, 0xae, 0x55, 0x3a, 0x27, 0x48, 0x23, 0xcf, 0x5c, 0x4d, 0xbc, 0x0b, 0x35,
0x5c, 0xa7, 0x17, 0xcf, 0x34, 0x57, 0xc9, 0x58, 0xc5, 0x20, 0x09, 0xee, 0xa5, 0xf2, 0x9c, 0x6c,
0x39, 0x1a, 0x77, 0x92, 0x9b, 0xff, 0xc6, 0xae, 0xf8, 0x36, 0xba, 0xa8, 0xaa, 0x6b, 0x1e, 0x8c,
0xc5, 0x97, 0x39, 0x6a, 0xb8, 0xa2, 0x55, 0xa8, 0xf8};
#define MUTE_ADTS_DATA s_mute_adts
#define MUTE_ADTS_DATA_LEN sizeof(s_mute_adts)
#define MUTE_ADTS_DATA_MS 130
PlayerProxy::PlayerProxy(const string &vhost, const string &app, const string &stream_id,
bool enable_hls, bool enable_mp4, int retry_count, const EventPoller::Ptr &poller)
: MediaPlayer(poller) {
@ -206,40 +179,6 @@ std::shared_ptr<SockInfo> PlayerProxy::getOriginSock(MediaSource &sender) const
return getSockInfo();
}
class MuteAudioMaker : public FrameDispatcher {
public:
typedef std::shared_ptr<MuteAudioMaker> Ptr;
MuteAudioMaker() {};
~MuteAudioMaker() override {}
void inputFrame(const Frame::Ptr &frame) override {
if (frame->getTrackType() == TrackVideo) {
auto audio_idx = frame->dts() / MUTE_ADTS_DATA_MS;
if (_audio_idx != audio_idx) {
_audio_idx = audio_idx;
auto aacFrame = std::make_shared<FrameFromStaticPtr>(CodecAAC, (char *)MUTE_ADTS_DATA, MUTE_ADTS_DATA_LEN, _audio_idx * MUTE_ADTS_DATA_MS, 0 ,ADTS_HEADER_LEN);
FrameDispatcher::inputFrame(aacFrame);
}
}
}
private:
class FrameFromStaticPtr : public FrameFromPtr {
public:
template<typename ... ARGS>
FrameFromStaticPtr(ARGS &&...args) : FrameFromPtr(std::forward<ARGS>(args)...) {};
~FrameFromStaticPtr() override = default;
bool cacheAble() const override {
return true;
}
};
private:
uint32_t _audio_idx = 0;
};
void PlayerProxy::onPlaySuccess() {
GET_CONFIG(bool, resetWhenRePlay, General::kResetWhenRePlay);
if (dynamic_pointer_cast<RtspMediaSource>(_pMediaSrc)) {
@ -268,24 +207,12 @@ void PlayerProxy::onPlaySuccess() {
videoTrack->addDelegate(_muxer);
}
//Whether to add mute audio
GET_CONFIG(bool, addMuteAudio, General::kAddMuteAudio);
auto audioTrack = getTrack(TrackAudio, false);
if (audioTrack) {
//Add the audio track
_muxer->addTrack(audioTrack);
//Audio data is written into _mediaMuxer
audioTrack->addDelegate(_muxer);
} else if (addMuteAudio && videoTrack) {
//No audio available; generate mute audio
MuteAudioMaker::Ptr audioMaker = std::make_shared<MuteAudioMaker>();
//videoTrack writes its data into the MuteAudioMaker
videoTrack->addDelegate(audioMaker);
//Add a mute Track to _mediaMuxer
_muxer->addTrack(std::make_shared<AACTrack>());
//MuteAudioMaker generates mute audio and writes it into _mediaMuxer
audioMaker->addDelegate(_muxer);
}
//All tracks have been added; prevents waiting up to 3 seconds in the single-track case

View File

@ -61,15 +61,16 @@ public:
return hls_demand ? (_clear_cache ? true : _enabled) : true;
}
void inputFrame(const Frame::Ptr &frame) override {
bool inputFrame(const Frame::Ptr &frame) override {
GET_CONFIG(bool, hls_demand, General::kHlsDemand);
if (_clear_cache && hls_demand) {
_clear_cache = false;
_hls->clearCache();
}
if (_enabled || !hls_demand) {
TsMuxer::inputFrame(frame);
return TsMuxer::inputFrame(frame);
}
return false;
}
private:

View File

@ -64,18 +64,18 @@ void MP4MuxerInterface::resetTracks() {
_codec_to_trackid.clear();
}
void MP4MuxerInterface::inputFrame(const Frame::Ptr &frame) {
bool MP4MuxerInterface::inputFrame(const Frame::Ptr &frame) {
auto it = _codec_to_trackid.find(frame->getCodecId());
if(it == _codec_to_trackid.end()){
//This Track does not exist or failed to initialize
return;
return false;
}
if (!_started) {
//This logic ensures that, when video is present, the first written frame is a key frame
if (_have_video && !frame->keyFrame()) {
//Video is present but this is not a key frame; discard the leading frames
return;
return false;
}
//Start writing the file
_started = true;
@ -113,8 +113,8 @@ void MP4MuxerInterface::inputFrame(const Frame::Ptr &frame) {
frame->keyFrame() ? MOV_AV_FLAG_KEYFREAME : 0);
break;
}
}
return true;
}
static uint8_t getObject(CodecId codecId){
@ -149,19 +149,19 @@ void MP4MuxerInterface::stampSync(){
}
}
void MP4MuxerInterface::addTrack(const Track::Ptr &track) {
bool MP4MuxerInterface::addTrack(const Track::Ptr &track) {
if (!_mov_writter) {
_mov_writter = createWriter();
}
auto mp4_object = getObject(track->getCodecId());
if (!mp4_object) {
WarnL << "MP4录制不支持该编码格式:" << track->getCodecName();
return;
return false;
}
if (!track->ready()) {
WarnL << "Track[" << track->getCodecName() << "]未就绪";
return;
return false;
}
switch (track->getCodecId()) {
@ -171,7 +171,7 @@ void MP4MuxerInterface::addTrack(const Track::Ptr &track) {
auto audio_track = dynamic_pointer_cast<AudioTrack>(track);
if (!audio_track) {
WarnL << "不是音频Track:" << track->getCodecName();
return;
return false;
}
auto track_id = mp4_writer_add_audio(_mov_writter.get(),
@ -182,17 +182,17 @@ void MP4MuxerInterface::addTrack(const Track::Ptr &track) {
nullptr, 0);
if (track_id < 0) {
WarnL << "添加Track[" << track->getCodecName() << "]失败:" << track_id;
return;
return false;
}
_codec_to_trackid[track->getCodecId()].track_id = track_id;
}
break;
}
case CodecAAC: {
auto audio_track = dynamic_pointer_cast<AACTrack>(track);
if (!audio_track) {
WarnL << "不是AAC Track";
return;
return false;
}
auto track_id = mp4_writer_add_audio(_mov_writter.get(),
@ -204,16 +204,17 @@ void MP4MuxerInterface::addTrack(const Track::Ptr &track) {
audio_track->getAacCfg().size());
if(track_id < 0){
WarnL << "添加AAC Track失败:" << track_id;
return;
return false;
}
_codec_to_trackid[track->getCodecId()].track_id = track_id;
}
break;
}
case CodecH264: {
auto h264_track = dynamic_pointer_cast<H264Track>(track);
if (!h264_track) {
WarnL << "不是H264 Track";
return;
return false;
}
struct mpeg4_avc_t avc = {0};
@ -225,7 +226,7 @@ void MP4MuxerInterface::addTrack(const Track::Ptr &track) {
int extra_data_size = mpeg4_avc_decoder_configuration_record_save(&avc, extra_data, sizeof(extra_data));
if (extra_data_size == -1) {
WarnL << "生成H264 extra_data 失败";
return;
return false;
}
auto track_id = mp4_writer_add_video(_mov_writter.get(),
@ -237,17 +238,18 @@ void MP4MuxerInterface::addTrack(const Track::Ptr &track) {
if(track_id < 0){
WarnL << "添加H264 Track失败:" << track_id;
return;
return false;
}
_codec_to_trackid[track->getCodecId()].track_id = track_id;
_have_video = true;
}
break;
}
case CodecH265: {
auto h265_track = dynamic_pointer_cast<H265Track>(track);
if (!h265_track) {
WarnL << "不是H265 Track";
return;
return false;
}
struct mpeg4_hevc_t hevc = {0};
@ -260,7 +262,7 @@ void MP4MuxerInterface::addTrack(const Track::Ptr &track) {
int extra_data_size = mpeg4_hevc_decoder_configuration_record_save(&hevc, extra_data, sizeof(extra_data));
if (extra_data_size == -1) {
WarnL << "生成H265 extra_data 失败";
return;
return false;
}
auto track_id = mp4_writer_add_video(_mov_writter.get(),
@ -271,18 +273,19 @@ void MP4MuxerInterface::addTrack(const Track::Ptr &track) {
extra_data_size);
if(track_id < 0){
WarnL << "添加H265 Track失败:" << track_id;
return;
return false;
}
_codec_to_trackid[track->getCodecId()].track_id = track_id;
_have_video = true;
}
break;
}
default: WarnL << "MP4录制不支持该编码格式:" << track->getCodecName(); break;
default: WarnL << "MP4录制不支持该编码格式:" << track->getCodecName(); return false;
}
//Try to synchronize audio and video timestamps
stampSync();
return true;
}
/////////////////////////////////////////// MP4MuxerMemory /////////////////////////////////////////////
@ -310,10 +313,10 @@ void MP4MuxerMemory::resetTracks(){
_init_segment.clear();
}
void MP4MuxerMemory::inputFrame(const Frame::Ptr &frame){
bool MP4MuxerMemory::inputFrame(const Frame::Ptr &frame){
if (_init_segment.empty()) {
//The init segment has not been generated yet
return;
return false;
}
bool key_frame = frame->keyFrame();
@ -330,7 +333,7 @@ void MP4MuxerMemory::inputFrame(const Frame::Ptr &frame){
if (key_frame) {
_key_frame = true;
}
MP4MuxerInterface::inputFrame(frame);
return MP4MuxerInterface::inputFrame(frame);
}
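For illustration, a minimal sketch of how a caller might consume the new bool results of addTrack()/inputFrame(); the helper name addAcceptedTracks and the surrounding loop are hypothetical and not part of this change:
// Hypothetical helper: add a batch of tracks and report how many the muxer accepted,
// now that addTrack() returns false for unsupported codecs, unready tracks or libmov errors.
static size_t addAcceptedTracks(MP4MuxerInterface &muxer, const std::vector<Track::Ptr> &tracks) {
    size_t accepted = 0;
    for (auto &track : tracks) {
        if (muxer.addTrack(track)) {
            ++accepted;
        }
    }
    return accepted;
}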

View File

@ -31,12 +31,12 @@ public:
/**
* Add a track; only a track already in ready state can be added
*/
void addTrack(const Track::Ptr &track) override;
bool addTrack(const Track::Ptr &track) override;
/**
* Input a frame
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
/**
* Reset all tracks
@ -120,7 +120,7 @@ public:
/**
* Input a frame
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
/**
* Get the fmp4 init segment

View File

@ -105,7 +105,7 @@ void MP4Recorder::closeFile() {
}
}
void MP4Recorder::inputFrame(const Frame::Ptr &frame) {
bool MP4Recorder::inputFrame(const Frame::Ptr &frame) {
if (_baseSec == 0) {
_baseSec = frame->dts();
}
@ -122,16 +122,18 @@ void MP4Recorder::inputFrame(const Frame::Ptr &frame) {
if (_muxer) {
//write the frame into the mp4 file
_muxer->inputFrame(frame);
return _muxer->inputFrame(frame);
}
return false;
}
void MP4Recorder::addTrack(const Track::Ptr & track){
bool MP4Recorder::addTrack(const Track::Ptr & track){
//save all tracks in preparation for creating the MP4MuxerFile
_tracks.emplace_back(track);
if(track->getTrackType() == TrackVideo){
_haveVideo = true;
}
return true;
}
void MP4Recorder::resetTracks() {

View File

@ -45,12 +45,13 @@ public:
/**
* Input a frame
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
/**
* Add a track that is already in ready state
*/
void addTrack(const Track::Ptr & track) override;
bool addTrack(const Track::Ptr & track) override;
private:
void createFile();
void closeFile();

View File

@ -44,7 +44,7 @@ void TsMuxer::stampSync(){
}
}
void TsMuxer::addTrack(const Track::Ptr &track) {
bool TsMuxer::addTrack(const Track::Ptr &track) {
switch (track->getCodecId()) {
case CodecH264: {
_have_video = true;
@ -78,17 +78,18 @@ void TsMuxer::addTrack(const Track::Ptr &track) {
break;
}
default: WarnL << "mpeg-ts 不支持该编码格式,已忽略:" << track->getCodecName(); break;
default: WarnL << "mpeg-ts 不支持该编码格式,已忽略:" << track->getCodecName(); return false;
}
//try to keep audio and video timestamps in sync
stampSync();
return true;
}
void TsMuxer::inputFrame(const Frame::Ptr &frame) {
bool TsMuxer::inputFrame(const Frame::Ptr &frame) {
auto it = _codec_to_trackid.find(frame->getCodecId());
if (it == _codec_to_trackid.end()) {
return;
return false;
}
auto &track_info = it->second;
int64_t dts_out, pts_out;
@ -97,7 +98,7 @@ void TsMuxer::inputFrame(const Frame::Ptr &frame) {
case CodecH264:
case CodecH265: {
//the logic here packs frames sharing the same timestamp (SPS, PPS, IDR) together and treats them as a single frame
_frame_merger.inputFrame(frame, [&](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer, bool have_idr){
return _frame_merger.inputFrame(frame, [&](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer, bool have_idr){
track_info.stamp.revise(dts, pts, dts_out, pts_out);
//use the video timestamp as the TS timestamp
_timestamp = (uint32_t) dts_out;
@ -106,13 +107,12 @@ void TsMuxer::inputFrame(const Frame::Ptr &frame) {
pts_out * 90LL, dts_out * 90LL, buffer->data(), buffer->size());
flushCache();
});
break;
}
case CodecAAC: {
if (frame->prefixSize() == 0) {
WarnL << "必须提供adts头才能mpeg-ts打包";
break;
return false;
}
}
@ -125,7 +125,7 @@ void TsMuxer::inputFrame(const Frame::Ptr &frame) {
mpeg_ts_write(_context, track_info.track_id, frame->keyFrame() ? 0x0001 : 0,
pts_out * 90LL, dts_out * 90LL, frame->data(), frame->size());
flushCache();
break;
return true;
}
}
}
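Here and in the PS encoder further down, frame dts/pts are in milliseconds while mpeg_ts_write()/ps_muxer_input() expect a 90 kHz clock, which is what the inline "* 90LL" factors do; a purely illustrative helper:
// Illustrative only: milliseconds -> 90 kHz ticks, e.g. a 40 ms stamp becomes 3600 ticks.
static inline int64_t msTo90kHz(int64_t stamp_ms) { return stamp_ms * 90LL; }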

View File

@ -30,7 +30,7 @@ public:
/**
* Add an audio/video track
*/
void addTrack(const Track::Ptr &track) override;
bool addTrack(const Track::Ptr &track) override;
/**
* Reset the audio/video tracks
@ -40,7 +40,7 @@ public:
/**
* Input frame data
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
protected:
/**

View File

@ -97,12 +97,15 @@ public:
/**
* Track-added event triggered by _demuxer
*/
void addTrack(const Track::Ptr &track) override {
bool addTrack(const Track::Ptr &track) override {
if (_muxer) {
_muxer->addTrack(track);
if (_muxer->addTrack(track)) {
track->addDelegate(_muxer);
return true;
}
}
return false;
}
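This override is the main consumer of the new return value: the muxer is attached as the track's frame delegate only when addTrack() succeeded, so frames of a rejected codec are never forwarded (the same wiring appears again for the RTSP player further down). A generic sketch of that pattern; wireTrack and its template parameter are hypothetical:
// Sketch: attach a sink to a track only if the sink actually accepted it.
template <typename SinkPtr>
static bool wireTrack(const Track::Ptr &track, const SinkPtr &sink) {
    if (!sink->addTrack(track)) {
        return false;              // sink rejected the track: no delegate, no frames
    }
    track->addDelegate(sink);      // frames now flow from the track into the sink
    return true;
}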
/**
* All-Tracks-added event triggered by _demuxer

View File

@ -58,15 +58,16 @@ public:
MediaSourceEventInterceptor::onReaderChanged(sender, size);
}
void inputFrame(const Frame::Ptr &frame) override {
bool inputFrame(const Frame::Ptr &frame) override {
GET_CONFIG(bool, rtmp_demand, General::kRtmpDemand);
if (_clear_cache && rtmp_demand) {
_clear_cache = false;
_media_src->clearCache();
}
if (_enabled || !rtmp_demand) {
RtmpMuxer::inputFrame(frame);
return RtmpMuxer::inputFrame(frame);
}
return false;
}
bool isEnabled() {

View File

@ -22,12 +22,12 @@ RtmpMuxer::RtmpMuxer(const TitleMeta::Ptr &title) {
_rtmp_ring = std::make_shared<RtmpRing::RingType>();
}
void RtmpMuxer::addTrack(const Track::Ptr &track) {
bool RtmpMuxer::addTrack(const Track::Ptr &track) {
auto &encoder = _encoder[track->getTrackType()];
//create the rtmp encoder; clone the Track to avoid circular references
encoder = Factory::getRtmpCodecByTrack(track->clone(), true);
if (!encoder) {
return;
return false;
}
//set the rtmp output ring buffer
@ -35,13 +35,12 @@ void RtmpMuxer::addTrack(const Track::Ptr &track) {
//add metadata
Metadata::addTrack(_metadata, track);
return true;
}
void RtmpMuxer::inputFrame(const Frame::Ptr &frame) {
bool RtmpMuxer::inputFrame(const Frame::Ptr &frame) {
auto &encoder = _encoder[frame->getTrackType()];
if(encoder){
encoder->inputFrame(frame);
}
return encoder ? encoder->inputFrame(frame) : false;
}
void RtmpMuxer::makeConfigPacket(){

View File

@ -43,13 +43,13 @@ public:
/**
* Add a track that is already in ready state
*/
void addTrack(const Track::Ptr & track) override;
bool addTrack(const Track::Ptr & track) override;
/**
* Write frame data
* @param frame frame data
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
/**
* Reset all tracks

View File

@ -149,6 +149,7 @@ bool GB28181Process::inputRtp(bool, const char *data, size_t data_len) {
//set the frame callback
_rtp_decoder[pt]->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([this](const Frame::Ptr &frame) {
onRtpDecode(frame);
return true;
}));
}
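Because FrameWriterInterface delegates now return bool as well, a lambda wrapped in FrameWriterInterfaceHelper has to report whether it consumed the frame; a sketch of a delegate that forwards the downstream result instead of a fixed true (the track and muxer names are hypothetical):
// Sketch: propagate the muxer's accept/reject result through the delegate chain.
track->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([muxer](const Frame::Ptr &frame) {
    return muxer->inputFrame(frame);
}));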

View File

@ -48,7 +48,7 @@ void PSEncoder::init() {
});
}
void PSEncoder::addTrack(const Track::Ptr &track) {
bool PSEncoder::addTrack(const Track::Ptr &track) {
switch (track->getCodecId()) {
case CodecH264: {
_codec_to_trackid[track->getCodecId()].track_id = ps_muxer_add_stream(_muxer.get(), STREAM_VIDEO_H264, nullptr, 0);
@ -80,10 +80,11 @@ void PSEncoder::addTrack(const Track::Ptr &track) {
break;
}
default: WarnL << "mpeg-ps 不支持该编码格式,已忽略:" << track->getCodecName(); break;
default: WarnL << "mpeg-ps 不支持该编码格式,已忽略:" << track->getCodecName(); return false;
}
//try to keep audio and video timestamps in sync
stampSync();
return true;
}
void PSEncoder::stampSync(){
@ -110,10 +111,10 @@ void PSEncoder::resetTracks() {
init();
}
void PSEncoder::inputFrame(const Frame::Ptr &frame) {
bool PSEncoder::inputFrame(const Frame::Ptr &frame) {
auto it = _codec_to_trackid.find(frame->getCodecId());
if (it == _codec_to_trackid.end()) {
return;
return false;
}
auto &track_info = it->second;
int64_t dts_out, pts_out;
@ -121,20 +122,19 @@ void PSEncoder::inputFrame(const Frame::Ptr &frame) {
case CodecH264:
case CodecH265: {
//the logic here packs frames sharing the same timestamp (SPS, PPS, IDR) together and treats them as a single frame
_frame_merger.inputFrame(frame, [&](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer, bool have_idr) {
return _frame_merger.inputFrame(frame, [&](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer, bool have_idr) {
track_info.stamp.revise(dts, pts, dts_out, pts_out);
//use the video timestamp as the TS timestamp
_timestamp = (uint32_t) pts_out;
ps_muxer_input(_muxer.get(), track_info.track_id, have_idr ? 0x0001 : 0,
pts_out * 90LL, dts_out * 90LL, buffer->data(), buffer->size());
});
break;
}
case CodecAAC: {
if (frame->prefixSize() == 0) {
WarnL << "必须提供adts头才能mpeg-ps打包";
break;
return false;
}
}
@ -143,7 +143,7 @@ void PSEncoder::inputFrame(const Frame::Ptr &frame) {
_timestamp = (uint32_t) dts_out;
ps_muxer_input(_muxer.get(), track_info.track_id, frame->keyFrame() ? 0x0001 : 0, pts_out * 90LL,
dts_out * 90LL, frame->data(), frame->size());
break;
return true;
}
}
}

View File

@ -26,7 +26,7 @@ public:
/**
* Add an audio/video track
*/
void addTrack(const Track::Ptr &track) override;
bool addTrack(const Track::Ptr &track) override;
/**
* Reset the audio/video tracks
@ -36,7 +36,7 @@ public:
/**
* Input frame data
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
protected:
/**

View File

@ -106,18 +106,18 @@ bool RtpProcess::inputRtp(bool is_udp, const Socket::Ptr &sock, const char *data
return ret;
}
void RtpProcess::inputFrame(const Frame::Ptr &frame) {
bool RtpProcess::inputFrame(const Frame::Ptr &frame) {
_dts = frame->dts();
if (_save_file_video && frame->getTrackType() == TrackVideo) {
fwrite((uint8_t *) frame->data(), frame->size(), 1, _save_file_video.get());
}
if (_muxer) {
_last_frame_time.resetTime();
_muxer->inputFrame(frame);
} else {
return _muxer->inputFrame(frame);
}
if (_cached_func.size() > kMaxCachedFrame) {
WarnL << "cached frame of track(" << frame->getCodecName() << ") is too much, now dropped";
return;
return false;
}
auto frame_cached = Frame::getCacheAbleFrame(frame);
lock_guard<recursive_mutex> lck(_func_mtx);
@ -125,18 +125,19 @@ void RtpProcess::inputFrame(const Frame::Ptr &frame) {
_last_frame_time.resetTime();
_muxer->inputFrame(frame_cached);
});
}
return true;
}
void RtpProcess::addTrack(const Track::Ptr &track) {
bool RtpProcess::addTrack(const Track::Ptr &track) {
if (_muxer) {
_muxer->addTrack(track);
} else {
return _muxer->addTrack(track);
}
lock_guard<recursive_mutex> lck(_func_mtx);
_cached_func.emplace_back([this, track]() {
_muxer->addTrack(track);
});
}
return true;
}
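Both functions above use the same deferred pattern: before _muxer exists, the work is captured into _cached_func and replayed later (the replay point lives elsewhere in RtpProcess and is not part of this diff); note that inputFrame() still returns true when a frame is merely cached, and false only when the cache overflows. A generic sketch of the pattern, with hypothetical names and assuming <functional>, <mutex> and <vector>:
// Deferred-call sketch (hypothetical names, illustrative only).
bool runOrDefer(std::vector<std::function<void()>> &pending, std::recursive_mutex &mtx,
                const std::function<void()> &task, bool target_ready) {
    if (target_ready) {
        task();                     // target exists: run immediately
        return true;
    }
    std::lock_guard<std::recursive_mutex> lck(mtx);
    pending.emplace_back(task);     // replayed later, once the target exists
    return true;                    // "accepted": queued, not rejected
}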
void RtpProcess::addTrackCompleted() {

View File

@ -67,8 +67,8 @@ public:
void setListener(const std::weak_ptr<MediaSourceEvent> &listener);
protected:
void inputFrame(const Frame::Ptr &frame) override;
void addTrack(const Track::Ptr & track) override;
bool inputFrame(const Frame::Ptr &frame) override;
bool addTrack(const Track::Ptr & track) override;
void addTrackCompleted() override;
void resetTracks() override {};

View File

@ -97,8 +97,8 @@ void RtpSender::onConnect(){
InfoL << "开始发送 rtp:" << _socket->get_peer_ip() << ":" << _socket->get_peer_port() << ", 是否为udp方式:" << _is_udp;
}
void RtpSender::addTrack(const Track::Ptr &track){
_interface->addTrack(track);
bool RtpSender::addTrack(const Track::Ptr &track){
return _interface->addTrack(track);
}
void RtpSender::addTrackCompleted(){
@ -110,11 +110,9 @@ void RtpSender::resetTracks(){
}
//this function runs on another thread
void RtpSender::inputFrame(const Frame::Ptr &frame) {
if (_is_connect) {
bool RtpSender::inputFrame(const Frame::Ptr &frame) {
//only do real work after the connection succeeds (saves cpu)
_interface->inputFrame(frame);
}
return _is_connect ? _interface->inputFrame(frame) : false;
}
//this function runs on another thread

View File

@ -42,14 +42,14 @@ public:
/**
* Input frame data
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
/**
* Add a track; the Track's clone() method is called internally,
* so only information such as sps/pps is copied, not the Delegate relations
* @param track the track to add
*/
virtual void addTrack(const Track::Ptr & track) override;
virtual bool addTrack(const Track::Ptr & track) override;
/**
* All Tracks have been added

View File

@ -94,12 +94,15 @@ public:
/**
* Track-added event triggered by _demuxer
*/
void addTrack(const Track::Ptr &track) override {
bool addTrack(const Track::Ptr &track) override {
if (_muxer) {
_muxer->addTrack(track);
if (_muxer->addTrack(track)) {
track->addDelegate(_muxer);
return true;
}
}
return false;
}
/**
* All-Tracks-added event triggered by _demuxer

View File

@ -57,15 +57,16 @@ public:
MediaSourceEventInterceptor::onReaderChanged(sender, size);
}
void inputFrame(const Frame::Ptr &frame) override {
bool inputFrame(const Frame::Ptr &frame) override {
GET_CONFIG(bool, rtsp_demand, General::kRtspDemand);
if (_clear_cache && rtsp_demand) {
_clear_cache = false;
_media_src->clearCache();
}
if (_enabled || !rtsp_demand) {
RtspMuxer::inputFrame(frame);
return RtspMuxer::inputFrame(frame);
}
return false;
}
bool isEnabled() {

View File

@ -43,17 +43,17 @@ RtspMuxer::RtspMuxer(const TitleSdp::Ptr &title) {
_ntp_stamp_start = getCurrentMillisecond(true);
}
void RtspMuxer::addTrack(const Track::Ptr &track) {
bool RtspMuxer::addTrack(const Track::Ptr &track) {
//generate the sdp from the track
Sdp::Ptr sdp = track->getSdp();
if (!sdp) {
return;
return false;
}
auto &encoder = _encoder[track->getTrackType()];
encoder = Factory::getRtpEncoderBySdp(sdp);
if (!encoder) {
return;
return false;
}
//set the rtp output ring buffer
@ -62,6 +62,7 @@ void RtspMuxer::addTrack(const Track::Ptr &track) {
//append its sdp
_sdp.append(sdp->getSdp());
trySyncTrack();
return true;
}
void RtspMuxer::trySyncTrack() {
@ -71,11 +72,9 @@ void RtspMuxer::trySyncTrack() {
}
}
void RtspMuxer::inputFrame(const Frame::Ptr &frame) {
bool RtspMuxer::inputFrame(const Frame::Ptr &frame) {
auto &encoder = _encoder[frame->getTrackType()];
if (encoder) {
encoder->inputFrame(frame);
}
return encoder ? encoder->inputFrame(frame) : false;
}
string RtspMuxer::getSdp() {

View File

@ -63,13 +63,13 @@ public:
/**
* Add a track that is already in ready state
*/
void addTrack(const Track::Ptr & track) override;
bool addTrack(const Track::Ptr & track) override;
/**
* Write frame data
* @param frame frame data
*/
void inputFrame(const Frame::Ptr &frame) override;
bool inputFrame(const Frame::Ptr &frame) override;
/**
* Reset all tracks

View File

@ -47,15 +47,16 @@ public:
MediaSourceEventInterceptor::onReaderChanged(sender, size);
}
void inputFrame(const Frame::Ptr &frame) override {
bool inputFrame(const Frame::Ptr &frame) override {
GET_CONFIG(bool, ts_demand, General::kTSDemand);
if (_clear_cache && ts_demand) {
_clear_cache = false;
_media_src->clearCache();
}
if (_enabled || !ts_demand) {
TsMuxer::inputFrame(frame);
return TsMuxer::inputFrame(frame);
}
return false;
}
bool isEnabled() {
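The RTMP, RTSP and TS wrappers above all share this on-demand gate: when on-demand is configured and no reader has enabled the stream, the frame is intentionally not muxed and false is returned. A condensed sketch of the gate with hypothetical names (gatedInput, demand, Base):
// Condensed form of the on-demand gate used by the three wrappers above (hypothetical names).
bool gatedInput(const Frame::Ptr &frame) {
    if (_enabled || !demand) {              // demand disabled: always mux
        return Base::inputFrame(frame);     // real packetizing happens in the base muxer
    }
    return false;                           // on-demand idle: frame deliberately dropped
}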