重构 audio_transcode 代码:

- 独立出 RtcMediaSource,并只对rtc开放
- 增加Rtc g711转码开关
- 修改说明
This commit is contained in:
cqm 2022-11-29 20:38:59 +08:00
parent 2adc12c4ab
commit f4b2fd9c05
8 changed files with 281 additions and 135 deletions

View File

@ -50,9 +50,9 @@ auto_close=0
#此参数不应大于播放器超时时间;单位毫秒 #此参数不应大于播放器超时时间;单位毫秒
continue_push_ms=15000 continue_push_ms=15000
# 是否启用音频转码 # 是否启用音频转码
# 转码主要基于包过滤机制,代码实现详见 MultiMediaSourceMuxer 的 onTrackReady 和 onTrackFrame 方法,当前已实现 # 主要实现进出RTC音频流的自动转码代码实现详见 RtcMediaSource.h/cpp当前实现
# 1. AAC转Opus, 然后传入rtc # 1. 将RTC收到的Opus音频流转成AAC流并传到MultiMediaSourceMuxer中的其他流
# 2. 非AAC音频转aac然后传入rtmp # 2. 将从MultiMediaSourceMuxer中收到的AAC音频流转成OPUS并送到RTC流
# 音频转码底层使用FFMPEG来实现需要打开FFMPEG, 即编译时必须指定 -DENABLE_FFMPEG=1, 当前已知支持FFMPEG 4.x 5.x 和 6.0 # 音频转码底层使用FFMPEG来实现需要打开FFMPEG, 即编译时必须指定 -DENABLE_FFMPEG=1, 当前已知支持FFMPEG 4.x 5.x 和 6.0
# 在ubuntu中可通过以下指令来安装: apt-get install libavcodec-dev libavutil-dev libswscale-dev libresample-dev # 在ubuntu中可通过以下指令来安装: apt-get install libavcodec-dev libavutil-dev libswscale-dev libresample-dev
# 此外音频转码正常都是用于webrtc的一般也会开启WEBRTC, 即-DENABLE_WEBRTC=1, 此前必须自己装好libsrtp库, 安装过程详见wiki # 此外音频转码正常都是用于webrtc的一般也会开启WEBRTC, 即-DENABLE_WEBRTC=1, 此前必须自己装好libsrtp库, 安装过程详见wiki
@ -354,6 +354,10 @@ preferredCodecA=PCMU,PCMA,opus,mpeg4-generic
#rtc支持的视频codec类型,在前面的优先级更高 #rtc支持的视频codec类型,在前面的优先级更高
#以下范例为所有支持的视频codec #以下范例为所有支持的视频codec
preferredCodecV=H264,H265,AV1,VP9,VP8 preferredCodecV=H264,H265,AV1,VP9,VP8
# 是否开启RTC协议的G711转码开启后
# 能将传给rtc的g711音频转成opus
# 将由rtc流入的g711音频转成aac并转给其他协议流
transcodeG711=0
[srt] [srt]
#srt播放推流、播放超时时间,单位秒 #srt播放推流、播放超时时间,单位秒

View File

@ -11,9 +11,6 @@
#include <math.h> #include <math.h>
#include "Common/config.h" #include "Common/config.h"
#include "MultiMediaSourceMuxer.h" #include "MultiMediaSourceMuxer.h"
#include "Extension/AAC.h"
#include "Extension/Opus.h"
#include "Extension/G711.h"
#include "Rtp/RtpSender.h" #include "Rtp/RtpSender.h"
#include "Record/HlsRecorder.h" #include "Record/HlsRecorder.h"
#include "Record/HlsMediaSource.h" #include "Record/HlsMediaSource.h"
@ -21,9 +18,8 @@
#include "Rtmp/RtmpMediaSourceMuxer.h" #include "Rtmp/RtmpMediaSourceMuxer.h"
#include "TS/TSMediaSourceMuxer.h" #include "TS/TSMediaSourceMuxer.h"
#include "FMP4/FMP4MediaSourceMuxer.h" #include "FMP4/FMP4MediaSourceMuxer.h"
#if defined(ENABLE_WEBRTC)
#ifdef ENABLE_FFMPEG #include "webrtc/RtcMediaSource.h"
#include "Codec/Transcode.h"
#endif #endif
using namespace std; using namespace std;
using namespace toolkit; using namespace toolkit;
@ -122,7 +118,7 @@ MultiMediaSourceMuxer::MultiMediaSourceMuxer(const MediaTuple& tuple, float dur_
} }
if (option.enable_rtc) { if (option.enable_rtc) {
#if defined(ENABLE_WEBRTC) #if defined(ENABLE_WEBRTC)
_rtc = std::make_shared<RtspMediaSourceMuxer>(_tuple, option, std::make_shared<TitleSdp>(dur_sec), RTC_SCHEMA); _rtc = std::make_shared<RtcMediaSourceMuxer>(_tuple, option, std::make_shared<TitleSdp>(dur_sec));
#endif #endif
} }
if (option.enable_hls) { if (option.enable_hls) {
@ -137,14 +133,6 @@ MultiMediaSourceMuxer::MultiMediaSourceMuxer(const MediaTuple& tuple, float dur_
if (option.enable_ts) { if (option.enable_ts) {
_ts = dynamic_pointer_cast<TSMediaSourceMuxer>(Recorder::createRecorder(Recorder::type_ts, _tuple, option)); _ts = dynamic_pointer_cast<TSMediaSourceMuxer>(Recorder::createRecorder(Recorder::type_ts, _tuple, option));
} }
if (option.audio_transcode) {
#if defined(ENABLE_FFMPEG)
InfoL << "enable audio_transcode";
#else
InfoL << "without ffmpeg disable audio_transcode";
_option.audio_transcode = false;
#endif
}
if (option.enable_fmp4) { if (option.enable_fmp4) {
_fmp4 = dynamic_pointer_cast<FMP4MediaSourceMuxer>(Recorder::createRecorder(Recorder::type_fmp4, _tuple, option)); _fmp4 = dynamic_pointer_cast<FMP4MediaSourceMuxer>(Recorder::createRecorder(Recorder::type_fmp4, _tuple, option));
@ -407,79 +395,9 @@ std::shared_ptr<MultiMediaSourceMuxer> MultiMediaSourceMuxer::getMuxer(MediaSour
bool MultiMediaSourceMuxer::onTrackReady(const Track::Ptr &track) { bool MultiMediaSourceMuxer::onTrackReady(const Track::Ptr &track) {
bool ret = false; bool ret = false;
auto rtmp = _rtmp; if (_rtc && _rtc->addTrack(track))
auto rtc = _rtc;
#if defined(ENABLE_FFMPEG)
if (_option.audio_transcode) {
if (track->getCodecId() == CodecAAC) {
if (rtmp) {
rtmp->addTrack(track);
rtmp = nullptr;
}
_audio_dec = nullptr;
_audio_enc = nullptr;
_opus_mute_maker = nullptr;
if (rtc) {
Track::Ptr newTrack(new OpusTrack());
GET_CONFIG(int, bitrate, General::kOpusBitrate);
newTrack->setBitRate(bitrate);
rtc->addTrack(newTrack);
rtc = nullptr;
if (!hasMuteAudio()) {
// aac to opus
_audio_dec.reset(new FFmpegDecoder(track));
_audio_enc.reset(new FFmpegEncoder(newTrack));
_audio_dec->setOnDecode([this](const FFmpegFrame::Ptr & frame) {
_audio_enc->inputFrame(frame, false);
});
_audio_enc->setOnEncode([this](const Frame::Ptr& frame) {
// fill data to _rtc
if (_rtc && _rtc->isEnabled())
_rtc->inputFrame(frame);
});
}
else {
_opus_mute_maker = std::make_shared<MuteAudioMaker>(CodecOpus);
_opus_mute_maker->addDelegate([this](const Frame::Ptr &frame) {
if (_rtc && _rtc->isEnabled())
_rtc->inputFrame(frame);
return true;
});
}
}
}
else if (track->getTrackType() == TrackAudio) {
if (rtc) {
rtc->addTrack(track);
rtc = nullptr;
}
_audio_dec = nullptr;
_audio_enc = nullptr;
_opus_mute_maker = nullptr;
if (rtmp) {
Track::Ptr newTrack(new AACTrack(44100, std::dynamic_pointer_cast<AudioTrack>(track)->getAudioChannel()));
GET_CONFIG(int, bitrate, General::kAacBitrate);
newTrack->setBitRate(bitrate);
rtmp->addTrack(newTrack);
rtmp = nullptr;
_audio_dec.reset(new FFmpegDecoder(track));
_audio_enc.reset(new FFmpegEncoder(newTrack));
_audio_dec->setOnDecode([this](const FFmpegFrame::Ptr & frame) {
_audio_enc->inputFrame(frame, false);
});
_audio_enc->setOnEncode([this](const Frame::Ptr& frame) {
// fill aac frame to rtmp
if (_rtmp && _rtmp->isEnabled())
_rtmp->inputFrame(frame);
});
}
}
}
#endif
if (rtc && rtc->addTrack(track))
ret = true; ret = true;
if (rtmp && rtmp->addTrack(track)) if (_rtmp && _rtmp->addTrack(track))
ret = true; ret = true;
if (_rtsp && _rtsp->addTrack(track)) if (_rtsp && _rtsp->addTrack(track))
ret = true; ret = true;
@ -579,11 +497,6 @@ void MultiMediaSourceMuxer::resetTracks() {
if (_rtc) { if (_rtc) {
_rtc->resetTracks(); _rtc->resetTracks();
} }
#if defined(ENABLE_FFMPEG)
_audio_dec = nullptr;
_audio_dec = nullptr;
_opus_mute_maker = nullptr;
#endif
if (_fmp4) { if (_fmp4) {
_fmp4->resetTracks(); _fmp4->resetTracks();
} }
@ -606,36 +519,10 @@ bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame_in) {
} }
bool ret = false; bool ret = false;
RtspMediaSourceMuxer::Ptr rtc; if (_rtc && _rtc->inputFrame(frame))
RtmpMediaSourceMuxer::Ptr rtmp;
if (_rtc && _rtc->isEnabled())
rtc = _rtc;
if (_rtmp && _rtmp->isEnabled())
rtmp = _rtmp;
#if defined(ENABLE_FFMPEG)
if (_option.audio_transcode) {
if (frame->getCodecId() == CodecAAC) {
if (rtc) {
if (_audio_dec && rtc->readerCount())
_audio_dec->inputFrame(frame, true, false, false);
rtc = nullptr;
}
}
else if (frame->getTrackType() == TrackAudio) {
if (rtmp) {
if (_audio_dec && (rtmp->readerCount() || !rtmp->isRegisted()))
_audio_dec->inputFrame(frame, true, false, false);
rtmp = nullptr;
}
} else if (_opus_mute_maker && rtc) {
_opus_mute_maker->inputFrame(frame);
}
}
#endif
if (rtc && rtc->inputFrame(frame))
ret = true; ret = true;
if (rtmp && rtmp->inputFrame(frame)) if (_rtmp && _rtmp->inputFrame(frame))
ret = true; ret = true;
if (_rtsp && _rtsp->inputFrame(frame)) if (_rtsp && _rtsp->inputFrame(frame))

View File

@ -24,10 +24,6 @@ class RtmpMediaSourceMuxer;
class TSMediaSourceMuxer; class TSMediaSourceMuxer;
class FMP4MediaSourceMuxer; class FMP4MediaSourceMuxer;
class RtpSender; class RtpSender;
#ifdef ENABLE_FFMPEG
class FFmpegDecoder;
class FFmpegEncoder;
#endif
class MultiMediaSourceMuxer : public MediaSourceEventInterceptor, public MediaSink, public std::enable_shared_from_this<MultiMediaSourceMuxer>{ class MultiMediaSourceMuxer : public MediaSourceEventInterceptor, public MediaSink, public std::enable_shared_from_this<MultiMediaSourceMuxer>{
public: public:
@ -178,11 +174,6 @@ private:
std::shared_ptr<RtspMediaSourceMuxer> _rtsp; std::shared_ptr<RtspMediaSourceMuxer> _rtsp;
std::shared_ptr<TSMediaSourceMuxer> _ts; std::shared_ptr<TSMediaSourceMuxer> _ts;
std::shared_ptr<RtspMediaSourceMuxer> _rtc; std::shared_ptr<RtspMediaSourceMuxer> _rtc;
#if defined(ENABLE_FFMPEG)
MuteAudioMaker::Ptr _opus_mute_maker;
std::shared_ptr<FFmpegDecoder> _audio_dec;
std::shared_ptr<FFmpegEncoder> _audio_enc;
#endif
MediaSinkInterface::Ptr _mp4; MediaSinkInterface::Ptr _mp4;
std::shared_ptr<HlsRecorder> _hls; std::shared_ptr<HlsRecorder> _hls;
std::shared_ptr<HlsFMP4Recorder> _hls_fmp4; std::shared_ptr<HlsFMP4Recorder> _hls_fmp4;

199
webrtc/RtcMediaSource.cpp Normal file
View File

@ -0,0 +1,199 @@
#include "RtcMediaSource.h"
#include "Common/config.h"
#include "Codec/Transcode.h"
#include "Extension/AAC.h"
#include "Extension/Opus.h"
#include "Extension/G711.h"
// for RTC configure
#include "WebRtcTransport.h"
namespace mediakit {
// Decide whether an audio codec entering the RTC muxer must be transcoded to Opus.
// AAC always needs transcoding; G711A/G711U only when the transcodeG711 switch is on.
bool needTransToOpus(CodecId codec) {
    GET_CONFIG(int, transG711, Rtc::kTranscodeG711);
    if (codec == CodecAAC) {
        return true;
    }
    if (codec == CodecG711U || codec == CodecG711A) {
        return transG711 != 0;
    }
    return false;
}
// Decide whether an audio codec leaving the RTC source must be transcoded to AAC
// for the other protocols. Opus always; G711A/G711U only when transcodeG711 is on.
bool needTransToAac(CodecId codec) {
    GET_CONFIG(int, transG711, Rtc::kTranscodeG711);
    if (codec == CodecOpus) {
        return true;
    }
    if (codec == CodecG711U || codec == CodecG711A) {
        return transG711 != 0;
    }
    return false;
}
// Muxer bound to the RTC schema. Audio transcoding relies on FFmpeg, so the
// option is forced off when the project was built without it.
RtcMediaSourceMuxer::RtcMediaSourceMuxer(const MediaTuple& tuple, const ProtocolOption &option, const TitleSdp::Ptr &title)
    : RtspMediaSourceMuxer(tuple, option, title, RTC_SCHEMA)
{
#ifndef ENABLE_FFMPEG
    if (_option.audio_transcode) {
        WarnL << "without ffmpeg, skip transcode setting";
        _option.audio_transcode = false;
    }
#endif
}
// Clone this source under a new stream id, carrying over the SDP and protocol options.
RtspMediaSource::Ptr RtcMediaSourceImp::clone(const std::string &stream) {
    MediaTuple new_tuple = _tuple;
    new_tuple.stream = stream;
    auto cloned = std::make_shared<RtcMediaSourceImp>(new_tuple);
    cloned->setSdp(getSdp());
    cloned->setProtocolOption(getProtocolOption());
    return cloned;
}
/**
 * Feed one frame into the RTC muxer.
 * When audio transcoding is enabled and the codec must become Opus
 * (AAC always; G711 only with the transcodeG711 switch, see needTransToOpus),
 * the frame is routed through the FFmpeg decoder->encoder chain that was set
 * up in addTrack, instead of going straight into the RTSP muxer.
 * Returns true when the frame was consumed.
 */
bool RtcMediaSourceMuxer::inputFrame(const Frame::Ptr &frame)
{
    if (_clear_cache && _on_demand) {
        _clear_cache = false;
        _media_src->clearCache();
    }
    if (_enabled || !_on_demand) {
#if defined(ENABLE_FFMPEG)
        if (_option.audio_transcode && needTransToOpus(frame->getCodecId())) {
            if (!_audio_dec) { // addTrack may not have been called yet; build the track from this frame and retry
                Track::Ptr track;
                switch (frame->getCodecId())
                {
                case CodecAAC:
                    if (frame->prefixSize()) {
                        // In-band ADTS header present: derive the real AAC config from it.
                        std::string cfg = makeAacConfig((uint8_t *)(frame->data()), frame->prefixSize());
                        track = std::make_shared<AACTrack>(cfg);
                    }
                    else {
                        // No ADTS prefix: assumes 44100 Hz stereo — TODO confirm against the actual stream.
                        track = std::make_shared<AACTrack>(44100, 2);
                    }
                    break;
                case CodecG711A:
                case CodecG711U:
                    track.reset(new G711Track(frame->getCodecId()));
                    break;
                default:
                    break;
                }
                if (track)
                    addTrack(track);
                // Still no decoder means addTrack did not set up transcoding; drop the frame.
                if (!_audio_dec) return false;
            }
            if (readerCount()) {
                // Only spend CPU on transcoding while somebody is actually reading this source.
                _audio_dec->inputFrame(frame, true, false);
                if (!_count)
                    InfoL << "start transcode " << frame->getCodecName() << "," << frame->pts() << "->Opus";
                _count++;
            }
            else if (_count) {
                InfoL << "stop transcode with " << _count << " items";
                _count = 0;
            }
            return true;
        }
#endif
        return RtspMuxer::inputFrame(frame);
    }
    return false;
}
#if defined(ENABLE_FFMPEG)
/**
 * Register a track with the RTC muxer.
 * An audio track that must be transcoded (see needTransToOpus) is replaced by
 * an Opus track, and an FFmpeg decode/encode chain is wired so that frames fed
 * to the decoder come back out as Opus into the RTSP muxer.
 */
bool RtcMediaSourceMuxer::addTrack(const Track::Ptr & track)
{
    if (!_option.audio_transcode || !needTransToOpus(track->getCodecId())) {
        return RtspMuxer::addTrack(track);
    }
    auto opus_track = std::make_shared<OpusTrack>();
    GET_CONFIG(int, bitrate, General::kOpusBitrate);
    opus_track->setBitRate(bitrate);
    _audio_dec.reset(new FFmpegDecoder(track));
    _audio_enc.reset(new FFmpegEncoder(opus_track));
    // source codec -> PCM -> Opus
    _audio_dec->setOnDecode([this](const FFmpegFrame::Ptr & frame) {
        _audio_enc->inputFrame(frame, false);
    });
    _audio_enc->setOnEncode([this](const Frame::Ptr& frame) {
        RtspMuxer::inputFrame(frame);
    });
    return RtspMuxer::addTrack(opus_track);
}
// Drop all tracks and tear down the transcoding chain, logging how many
// frames were transcoded in the last run.
void RtcMediaSourceMuxer::resetTracks()
{
    RtspMuxer::resetTracks();
    _audio_dec.reset();
    _audio_enc.reset();
    if (_count) {
        InfoL << "stop transcode with " << _count << " items";
        _count = 0;
    }
}
/**
 * Track-added event triggered by the internal demuxer.
 * When audio transcoding is on and the codec must be converted to AAC
 * (Opus always; G711 only with the transcodeG711 switch, see needTransToAac),
 * the original track is replaced by an AAC track fed through an FFmpeg
 * decode/encode chain, so non-RTC consumers of this source receive AAC audio.
 */
bool RtcMediaSourceImp::addTrack(const Track::Ptr &track)
{
    if (_muxer) {
        Track::Ptr newTrack = track;
        if (_option.audio_transcode && needTransToAac(track->getCodecId())) {
            // Replacement AAC track; assumes 44100 Hz stereo — TODO confirm source channel layout.
            newTrack.reset(new AACTrack(44100, 2));
            GET_CONFIG(int, bitrate, General::kAacBitrate);
            newTrack->setBitRate(bitrate);
            _audio_dec.reset(new FFmpegDecoder(track));
            _audio_enc.reset(new FFmpegEncoder(newTrack));
            // hook the original track's frames into the decoder; newTrack carries the encoded output
            track->addDelegate([this](const Frame::Ptr &frame) -> bool {
                // Skip transcoding while nobody is reading the muxed output.
                if (_all_track_ready && 0 == _muxer->totalReaderCount()) {
                    if (_count) {
                        InfoL << "stop transcode with " << _count << " items";
                        _count = 0;
                    }
                    return true;
                }
                if (_audio_dec) {
                    if (!_count)
                        InfoL << "start transcode " << frame->getCodecName() << "," << frame->pts() << "->AAC";
                    _count++;
                    _audio_dec->inputFrame(frame, true, false);
                }
                return true;
            });
            _audio_dec->setOnDecode([this](const FFmpegFrame::Ptr & frame) {
                _audio_enc->inputFrame(frame, false);
            });
            // encoded AAC frames are injected into the replacement track
            _audio_enc->setOnEncode([newTrack](const Frame::Ptr& frame) {
                newTrack->inputFrame(frame);
            });
        }
        if (_muxer->addTrack(newTrack)) {
            newTrack->addDelegate(_muxer);
            return true;
        }
    }
    return false;
}
// Reset inherited track state and release the transcoding pipeline,
// logging the size of the finished transcode run if one was active.
void RtcMediaSourceImp::resetTracks()
{
    RtspMediaSourceImp::resetTracks();
    _audio_dec.reset();
    _audio_enc.reset();
    if (_count) {
        InfoL << "stop transcode with " << _count << " items";
        _count = 0;
    }
}
#endif
}

60
webrtc/RtcMediaSource.h Normal file
View File

@ -0,0 +1,60 @@
#ifndef ZLMEDIAKIT_RTCMEDIASOURCE_H
#define ZLMEDIAKIT_RTCMEDIASOURCE_H
#include "Rtsp/RtspMediaSourceMuxer.h"
#include "Rtsp/RtspMediaSourceImp.h"
namespace mediakit {
class FFmpegDecoder;
class FFmpegEncoder;
bool needTransToOpus(CodecId codec);
bool needTransToAac(CodecId codec);
/**
 * Rtsp media source registered under the RTC schema.
 * With FFmpeg enabled it can transparently transcode incoming RTC audio
 * (Opus, optionally G711) to AAC for the other protocol sinks.
 */
class RtcMediaSourceImp : public RtspMediaSourceImp {
public:
    // modern alias style over typedef
    using Ptr = std::shared_ptr<RtcMediaSourceImp>;

    RtcMediaSourceImp(const MediaTuple& tuple, int ringSize = RTP_GOP_SIZE)
        : RtspMediaSourceImp(tuple, RTC_SCHEMA, ringSize) {
    }

    // Clone this source under a new stream id, copying sdp and protocol options.
    RtspMediaSource::Ptr clone(const std::string &stream);
#if defined(ENABLE_FFMPEG)
    ~RtcMediaSourceImp() override { resetTracks(); }

    /**
     * Track-added event triggered by _demuxer; may replace the audio track
     * with an AAC one and set up the FFmpeg transcoding chain.
     */
    bool addTrack(const Track::Ptr &track) override;

    void resetTracks() override;

private:
    int _count = 0;                            // frames fed to the transcoder in the current run
    std::shared_ptr<FFmpegDecoder> _audio_dec; // decodes the original audio codec
    std::shared_ptr<FFmpegEncoder> _audio_enc; // encodes decoded PCM to AAC
#endif
};
/**
 * Rtsp muxer bound to the RTC schema.
 * With FFmpeg enabled it can transcode audio headed into RTC
 * (AAC, optionally G711) to Opus before muxing.
 */
class RtcMediaSourceMuxer : public RtspMediaSourceMuxer {
public:
    // modern alias style over typedef
    using Ptr = std::shared_ptr<RtcMediaSourceMuxer>;

    RtcMediaSourceMuxer(const MediaTuple& tuple,
                        const ProtocolOption &option,
                        const TitleSdp::Ptr &title = nullptr);

    // Feed a frame; audio may be transcoded to Opus before muxing.
    bool inputFrame(const Frame::Ptr &frame) override;
#if defined(ENABLE_FFMPEG)
    ~RtcMediaSourceMuxer() override { resetTracks(); }

    // Replaces a to-be-transcoded audio track with an Opus track and
    // builds the FFmpeg decode/encode chain.
    bool addTrack(const Track::Ptr & track) override;

    void resetTracks() override;

private:
    int _count = 0;                            // frames transcoded since the last "start transcode" log
    std::shared_ptr<FFmpegDecoder> _audio_dec;
    std::shared_ptr<FFmpegEncoder> _audio_enc;
#endif
};
}
#endif

View File

@ -10,7 +10,7 @@
#include "WebRtcPusher.h" #include "WebRtcPusher.h"
#include "Common/config.h" #include "Common/config.h"
#include "Rtsp/RtspMediaSourceImp.h" #include "RtcMediaSource.h"
using namespace std; using namespace std;

View File

@ -18,7 +18,7 @@
#include "Rtsp/Rtsp.h" #include "Rtsp/Rtsp.h"
#include "Rtsp/RtpReceiver.h" #include "Rtsp/RtpReceiver.h"
#include "WebRtcTransport.h" #include "WebRtcTransport.h"
#include "RtcMediaSource.h"
#include "WebRtcEchoTest.h" #include "WebRtcEchoTest.h"
#include "WebRtcPlayer.h" #include "WebRtcPlayer.h"
#include "WebRtcPusher.h" #include "WebRtcPusher.h"
@ -45,6 +45,9 @@ const string kTimeOutSec = RTC_FIELD "timeoutSec";
const string kExternIP = RTC_FIELD "externIP"; const string kExternIP = RTC_FIELD "externIP";
// 设置remb比特率非0时关闭twcc并开启remb。该设置在rtc推流时有效可以控制推流画质 // 设置remb比特率非0时关闭twcc并开启remb。该设置在rtc推流时有效可以控制推流画质
const string kRembBitRate = RTC_FIELD "rembBitRate"; const string kRembBitRate = RTC_FIELD "rembBitRate";
// 是否转码G711音频。开启后: 出rtc时将g711转成aac入rtc时将g711转成opus // 是否转码G711音频。开启后: 出rtc时将g711转成aac入rtc时将g711转成opus
const string kTranscodeG711 = RTC_FIELD "transcodeG711";
// webrtc单端口udp服务器 // webrtc单端口udp服务器
const string kPort = RTC_FIELD "port"; const string kPort = RTC_FIELD "port";
@ -56,6 +59,7 @@ static onceToken token([]() {
mINI::Instance()[kRembBitRate] = 0; mINI::Instance()[kRembBitRate] = 0;
mINI::Instance()[kPort] = 0; mINI::Instance()[kPort] = 0;
mINI::Instance()[kTcpPort] = 0; mINI::Instance()[kTcpPort] = 0;
mINI::Instance()[kTranscodeG711] = 0;
}); });
} // namespace RTC } // namespace RTC
@ -1206,7 +1210,7 @@ void push_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
} }
if (!push_src) { if (!push_src) {
push_src = std::make_shared<RtspMediaSourceImp>(info, schema); push_src = std::make_shared<RtcMediaSourceImp>(info);
push_src_ownership = push_src->getOwnership(); push_src_ownership = push_src->getOwnership();
push_src->setProtocolOption(option); push_src->setProtocolOption(option);
} }

View File

@ -33,6 +33,7 @@ namespace Rtc {
extern const std::string kPort; extern const std::string kPort;
extern const std::string kTcpPort; extern const std::string kTcpPort;
extern const std::string kTimeOutSec; extern const std::string kTimeOutSec;
extern const std::string kTranscodeG711;
}//namespace RTC }//namespace RTC
class WebRtcInterface { class WebRtcInterface {