mirror of https://github.com/ZLMediaKit/ZLMediaKit.git (synced 2024-11-22 19:00:01 +08:00)
Refactor the audio_transcode code:
- split RtcMediaSource out as a standalone class, exposed only to RTC
- add a switch for G.711 transcoding over RTC
- update the documentation
This commit is contained in:
parent 2adc12c4ab
commit f4b2fd9c05
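For orientation, a minimal sketch of the classes and helpers this commit introduces, condensed from webrtc/RtcMediaSource.h added below; the inline comments here are editorial and not part of the diff:

// RTC-side source: with FFmpeg enabled it re-encodes inbound Opus/G.711 audio to AAC
// before handing frames to the other protocol muxers.
class RtcMediaSourceImp : public RtspMediaSourceImp { /* ... */ };
// RTC-side muxer: with FFmpeg enabled it re-encodes outbound AAC/G.711 audio to Opus.
class RtcMediaSourceMuxer : public RtspMediaSourceMuxer { /* ... */ };
// Codec checks; the G.711 cases are gated by the new rtc.transcodeG711 switch.
bool needTransToOpus(CodecId codec);
bool needTransToAac(CodecId codec);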
@@ -50,9 +50,9 @@ auto_close=0
#This value should not exceed the player timeout; unit: milliseconds
continue_push_ms=15000
# Whether to enable audio transcoding
# Transcoding is mainly based on a packet-filtering mechanism; see the onTrackReady and onTrackFrame methods of MultiMediaSourceMuxer. Currently implemented:
# 1. AAC to Opus, then fed into rtc
# 2. non-AAC audio to AAC, then fed into rtmp
# Implements automatic transcoding of audio entering and leaving RTC; see RtcMediaSource.h/cpp. Currently implemented:
# 1. Opus audio received from RTC is converted to AAC and passed to the other streams in MultiMediaSourceMuxer
# 2. AAC audio received from MultiMediaSourceMuxer is converted to Opus and fed into the RTC stream
# Audio transcoding is implemented on top of FFmpeg, so FFmpeg support must be enabled, i.e. compile with -DENABLE_FFMPEG=1; FFmpeg 4.x, 5.x and 6.0 are known to work
# On Ubuntu the dependencies can be installed with: apt-get install libavcodec-dev libavutil-dev libswscale-dev libresample-dev
# Audio transcoding is normally used together with WebRTC, so -DENABLE_WEBRTC=1 is usually enabled as well; libsrtp must be installed beforehand, see the wiki for details
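The transcode path described above is a decode-then-re-encode chain built on the FFmpegDecoder/FFmpegEncoder classes from Codec/Transcode.h. A minimal sketch of the AAC-to-Opus direction, condensed from the RtcMediaSourceMuxer code added later in this commit (requires -DENABLE_FFMPEG=1; the track parameters and variable names are illustrative only, not part of the diff):

// decode compressed AAC to PCM, then encode the PCM as Opus
auto aac  = std::make_shared<AACTrack>(44100, 2);   // source audio track (example parameters)
auto opus = std::make_shared<OpusTrack>();          // target track fed to the RTC stream
auto dec  = std::make_shared<FFmpegDecoder>(aac);
auto enc  = std::make_shared<FFmpegEncoder>(opus);
dec->setOnDecode([enc](const FFmpegFrame::Ptr &pcm) {
    enc->inputFrame(pcm, false);                    // raw PCM into the encoder
});
enc->setOnEncode([opus](const Frame::Ptr &frame) {
    opus->inputFrame(frame);                        // re-encoded Opus frames come out here
});
// every compressed AAC frame pushed into the decoder is then emitted as Opus, e.g.:
// dec->inputFrame(aac_frame, true, false);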
@@ -354,6 +354,10 @@ preferredCodecA=PCMU,PCMA,opus,mpeg4-generic
#Video codecs supported by rtc; codecs listed earlier have higher priority
#The following example lists all supported video codecs
preferredCodecV=H264,H265,AV1,VP9,VP8
# Whether to enable G.711 transcoding for the RTC protocol; when enabled it
# converts G.711 audio sent to rtc into Opus
# and converts G.711 audio arriving from rtc into AAC, forwarding it to the other protocol streams
transcodeG711=0

[srt]
#srt push/play timeout, in seconds
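The same switch can also be set programmatically through mINI, which is how WebRtcTransport.cpp registers its default further down in this diff. A minimal sketch, assuming only the kTranscodeG711 key added by this commit (the value 1 is illustrative):

// enable G.711 <-> Opus/AAC transcoding for RTC at runtime instead of via config.ini
toolkit::mINI::Instance()[mediakit::Rtc::kTranscodeG711] = 1;   // equivalent to rtc.transcodeG711=1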
@@ -11,9 +11,6 @@
#include <math.h>
#include "Common/config.h"
#include "MultiMediaSourceMuxer.h"
#include "Extension/AAC.h"
#include "Extension/Opus.h"
#include "Extension/G711.h"
#include "Rtp/RtpSender.h"
#include "Record/HlsRecorder.h"
#include "Record/HlsMediaSource.h"
@@ -21,9 +18,8 @@
#include "Rtmp/RtmpMediaSourceMuxer.h"
#include "TS/TSMediaSourceMuxer.h"
#include "FMP4/FMP4MediaSourceMuxer.h"

#ifdef ENABLE_FFMPEG
#include "Codec/Transcode.h"
#if defined(ENABLE_WEBRTC)
#include "webrtc/RtcMediaSource.h"
#endif
using namespace std;
using namespace toolkit;
@@ -122,7 +118,7 @@ MultiMediaSourceMuxer::MultiMediaSourceMuxer(const MediaTuple& tuple, float dur_
    }
    if (option.enable_rtc) {
#if defined(ENABLE_WEBRTC)
        _rtc = std::make_shared<RtspMediaSourceMuxer>(_tuple, option, std::make_shared<TitleSdp>(dur_sec), RTC_SCHEMA);
        _rtc = std::make_shared<RtcMediaSourceMuxer>(_tuple, option, std::make_shared<TitleSdp>(dur_sec));
#endif
    }
    if (option.enable_hls) {
@@ -137,14 +133,6 @@ MultiMediaSourceMuxer::MultiMediaSourceMuxer(const MediaTuple& tuple, float dur_
    if (option.enable_ts) {
        _ts = dynamic_pointer_cast<TSMediaSourceMuxer>(Recorder::createRecorder(Recorder::type_ts, _tuple, option));
    }
    if (option.audio_transcode) {
#if defined(ENABLE_FFMPEG)
        InfoL << "enable audio_transcode";
#else
        InfoL << "without ffmpeg disable audio_transcode";
        _option.audio_transcode = false;
#endif
    }

    if (option.enable_fmp4) {
        _fmp4 = dynamic_pointer_cast<FMP4MediaSourceMuxer>(Recorder::createRecorder(Recorder::type_fmp4, _tuple, option));
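The ProtocolOption flags above decide which sub-muxers get created. A minimal sketch of driving this from calling code, assuming the constructor signature shown in the hunk header (MediaTuple, float dur_sec, ProtocolOption) and the standard MediaTuple fields; the app/stream values are illustrative:

ProtocolOption option;
option.enable_rtc = true;        // take the ENABLE_WEBRTC branch above and create the RTC muxer
option.audio_transcode = true;   // logged here; forced off at runtime when built without FFmpeg
MediaTuple tuple;
tuple.vhost = DEFAULT_VHOST;
tuple.app = "live";
tuple.stream = "test";
// assumed full signature: MultiMediaSourceMuxer(const MediaTuple&, float dur_sec, const ProtocolOption&)
auto muxer = std::make_shared<MultiMediaSourceMuxer>(tuple, 0.0f, option);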
@@ -407,79 +395,9 @@ std::shared_ptr<MultiMediaSourceMuxer> MultiMediaSourceMuxer::getMuxer(MediaSour

bool MultiMediaSourceMuxer::onTrackReady(const Track::Ptr &track) {
    bool ret = false;
    auto rtmp = _rtmp;
    auto rtc = _rtc;
#if defined(ENABLE_FFMPEG)
    if (_option.audio_transcode) {
        if (track->getCodecId() == CodecAAC) {
            if (rtmp) {
                rtmp->addTrack(track);
                rtmp = nullptr;
            }
            _audio_dec = nullptr;
            _audio_enc = nullptr;
            _opus_mute_maker = nullptr;
            if (rtc) {
                Track::Ptr newTrack(new OpusTrack());
                GET_CONFIG(int, bitrate, General::kOpusBitrate);
                newTrack->setBitRate(bitrate);
                rtc->addTrack(newTrack);
                rtc = nullptr;
                if (!hasMuteAudio()) {
                    // aac to opus
                    _audio_dec.reset(new FFmpegDecoder(track));
                    _audio_enc.reset(new FFmpegEncoder(newTrack));
                    _audio_dec->setOnDecode([this](const FFmpegFrame::Ptr & frame) {
                        _audio_enc->inputFrame(frame, false);
                    });
                    _audio_enc->setOnEncode([this](const Frame::Ptr& frame) {
                        // fill data to _rtc
                        if (_rtc && _rtc->isEnabled())
                            _rtc->inputFrame(frame);
                    });
                }
                else {
                    _opus_mute_maker = std::make_shared<MuteAudioMaker>(CodecOpus);
                    _opus_mute_maker->addDelegate([this](const Frame::Ptr &frame) {
                        if (_rtc && _rtc->isEnabled())
                            _rtc->inputFrame(frame);
                        return true;
                    });
                }
            }
        }
        else if (track->getTrackType() == TrackAudio) {
            if (rtc) {
                rtc->addTrack(track);
                rtc = nullptr;
            }
            _audio_dec = nullptr;
            _audio_enc = nullptr;
            _opus_mute_maker = nullptr;
            if (rtmp) {
                Track::Ptr newTrack(new AACTrack(44100, std::dynamic_pointer_cast<AudioTrack>(track)->getAudioChannel()));
                GET_CONFIG(int, bitrate, General::kAacBitrate);
                newTrack->setBitRate(bitrate);
                rtmp->addTrack(newTrack);
                rtmp = nullptr;

                _audio_dec.reset(new FFmpegDecoder(track));
                _audio_enc.reset(new FFmpegEncoder(newTrack));
                _audio_dec->setOnDecode([this](const FFmpegFrame::Ptr & frame) {
                    _audio_enc->inputFrame(frame, false);
                });
                _audio_enc->setOnEncode([this](const Frame::Ptr& frame) {
                    // fill aac frame to rtmp
                    if (_rtmp && _rtmp->isEnabled())
                        _rtmp->inputFrame(frame);
                });
            }
        }
    }
#endif
    if (rtc && rtc->addTrack(track))
    if (_rtc && _rtc->addTrack(track))
        ret = true;
    if (rtmp && rtmp->addTrack(track))
    if (_rtmp && _rtmp->addTrack(track))
        ret = true;
    if (_rtsp && _rtsp->addTrack(track))
        ret = true;
@@ -579,11 +497,6 @@ void MultiMediaSourceMuxer::resetTracks() {
    if (_rtc) {
        _rtc->resetTracks();
    }
#if defined(ENABLE_FFMPEG)
    _audio_dec = nullptr;
    _audio_dec = nullptr;
    _opus_mute_maker = nullptr;
#endif
    if (_fmp4) {
        _fmp4->resetTracks();
    }
@@ -606,36 +519,10 @@ bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame_in) {
    }

    bool ret = false;
    RtspMediaSourceMuxer::Ptr rtc;
    RtmpMediaSourceMuxer::Ptr rtmp;
    if (_rtc && _rtc->isEnabled())
        rtc = _rtc;
    if (_rtmp && _rtmp->isEnabled())
        rtmp = _rtmp;
#if defined(ENABLE_FFMPEG)
    if (_option.audio_transcode) {
        if (frame->getCodecId() == CodecAAC) {
            if (rtc) {
                if (_audio_dec && rtc->readerCount())
                    _audio_dec->inputFrame(frame, true, false, false);
                rtc = nullptr;
            }
        }
        else if (frame->getTrackType() == TrackAudio) {
            if (rtmp) {
                if (_audio_dec && (rtmp->readerCount() || !rtmp->isRegisted()))
                    _audio_dec->inputFrame(frame, true, false, false);
                rtmp = nullptr;
            }
        } else if (_opus_mute_maker && rtc) {
            _opus_mute_maker->inputFrame(frame);
        }
    }
#endif
    if (rtc && rtc->inputFrame(frame))
    if (_rtc && _rtc->inputFrame(frame))
        ret = true;

    if (rtmp && rtmp->inputFrame(frame))
    if (_rtmp && _rtmp->inputFrame(frame))
        ret = true;

    if (_rtsp && _rtsp->inputFrame(frame))
@@ -24,10 +24,6 @@ class RtmpMediaSourceMuxer;
class TSMediaSourceMuxer;
class FMP4MediaSourceMuxer;
class RtpSender;
#ifdef ENABLE_FFMPEG
class FFmpegDecoder;
class FFmpegEncoder;
#endif

class MultiMediaSourceMuxer : public MediaSourceEventInterceptor, public MediaSink, public std::enable_shared_from_this<MultiMediaSourceMuxer>{
public:
@@ -178,11 +174,6 @@ private:
    std::shared_ptr<RtspMediaSourceMuxer> _rtsp;
    std::shared_ptr<TSMediaSourceMuxer> _ts;
    std::shared_ptr<RtspMediaSourceMuxer> _rtc;
#if defined(ENABLE_FFMPEG)
    MuteAudioMaker::Ptr _opus_mute_maker;
    std::shared_ptr<FFmpegDecoder> _audio_dec;
    std::shared_ptr<FFmpegEncoder> _audio_enc;
#endif
    MediaSinkInterface::Ptr _mp4;
    std::shared_ptr<HlsRecorder> _hls;
    std::shared_ptr<HlsFMP4Recorder> _hls_fmp4;

webrtc/RtcMediaSource.cpp (new file, 199 lines)
@@ -0,0 +1,199 @@
#include "RtcMediaSource.h"
|
||||
#include "Common/config.h"
|
||||
#include "Codec/Transcode.h"
|
||||
#include "Extension/AAC.h"
|
||||
#include "Extension/Opus.h"
|
||||
#include "Extension/G711.h"
|
||||
// for RTC configure
|
||||
#include "WebRtcTransport.h"
|
||||
namespace mediakit {
|
||||
|
||||
bool needTransToOpus(CodecId codec) {
|
||||
GET_CONFIG(int, transG711, Rtc::kTranscodeG711);
|
||||
switch (codec)
|
||||
{
|
||||
case CodecG711U:
|
||||
case CodecG711A:
|
||||
return transG711;
|
||||
case CodecAAC:
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
bool needTransToAac(CodecId codec) {
|
||||
GET_CONFIG(int, transG711, Rtc::kTranscodeG711);
|
||||
switch (codec)
|
||||
{
|
||||
case CodecG711U:
|
||||
case CodecG711A:
|
||||
return transG711;
|
||||
case CodecOpus:
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
RtcMediaSourceMuxer::RtcMediaSourceMuxer(const MediaTuple& tuple, const ProtocolOption &option, const TitleSdp::Ptr &title)
|
||||
: RtspMediaSourceMuxer(tuple, option, title, RTC_SCHEMA)
|
||||
{
|
||||
if (_option.audio_transcode) {
|
||||
#ifndef ENABLE_FFMPEG
|
||||
WarnL << "without ffmpeg, skip transcode setting";
|
||||
_option.audio_transcode = false;
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
RtspMediaSource::Ptr RtcMediaSourceImp::clone(const std::string &stream) {
|
||||
auto tuple = _tuple;
|
||||
tuple.stream = stream;
|
||||
auto src_imp = std::make_shared<RtcMediaSourceImp>(tuple);
|
||||
src_imp->setSdp(getSdp());
|
||||
src_imp->setProtocolOption(getProtocolOption());
|
||||
return src_imp;
|
||||
}
|
||||
|
||||
bool RtcMediaSourceMuxer::inputFrame(const Frame::Ptr &frame)
|
||||
{
|
||||
if (_clear_cache && _on_demand) {
|
||||
_clear_cache = false;
|
||||
_media_src->clearCache();
|
||||
}
|
||||
if (_enabled || !_on_demand) {
|
||||
#if defined(ENABLE_FFMPEG)
|
||||
if (_option.audio_transcode && needTransToOpus(frame->getCodecId())) {
|
||||
if (!_audio_dec) { // addTrack可能没调, 这边根据情况再调一次
|
||||
Track::Ptr track;
|
||||
switch (frame->getCodecId())
|
||||
{
|
||||
case CodecAAC:
|
||||
if (frame->prefixSize()) {
|
||||
std::string cfg = makeAacConfig((uint8_t *)(frame->data()), frame->prefixSize());
|
||||
track = std::make_shared<AACTrack>(cfg);
|
||||
}
|
||||
else {
|
||||
track = std::make_shared<AACTrack>(44100, 2);
|
||||
}
|
||||
break;
|
||||
case CodecG711A:
|
||||
case CodecG711U:
|
||||
track.reset(new G711Track(frame->getCodecId()));
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
if (track)
|
||||
addTrack(track);
|
||||
if (!_audio_dec) return false;
|
||||
}
|
||||
if (readerCount()) {
|
||||
_audio_dec->inputFrame(frame, true, false);
|
||||
if (!_count)
|
||||
InfoL << "start transcode " << frame->getCodecName() << "," << frame->pts() << "->Opus";
|
||||
_count++;
|
||||
}
|
||||
else if (_count) {
|
||||
InfoL << "stop transcode with " << _count << " items";
|
||||
_count = 0;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
return RtspMuxer::inputFrame(frame);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
#if defined(ENABLE_FFMPEG)
|
||||
|
||||
bool RtcMediaSourceMuxer::addTrack(const Track::Ptr & track)
|
||||
{
|
||||
Track::Ptr newTrack = track;
|
||||
if (_option.audio_transcode && needTransToOpus(track->getCodecId())) {
|
||||
newTrack = std::make_shared<OpusTrack>();
|
||||
GET_CONFIG(int, bitrate, General::kOpusBitrate);
|
||||
newTrack->setBitRate(bitrate);
|
||||
_audio_dec.reset(new FFmpegDecoder(track));
|
||||
_audio_enc.reset(new FFmpegEncoder(newTrack));
|
||||
// aac to opus
|
||||
_audio_dec->setOnDecode([this](const FFmpegFrame::Ptr & frame) {
|
||||
_audio_enc->inputFrame(frame, false);
|
||||
});
|
||||
_audio_enc->setOnEncode([this](const Frame::Ptr& frame) {
|
||||
RtspMuxer::inputFrame(frame);
|
||||
});
|
||||
}
|
||||
return RtspMuxer::addTrack(newTrack);
|
||||
}
|
||||
|
||||
|
||||
void RtcMediaSourceMuxer::resetTracks()
|
||||
{
|
||||
RtspMuxer::resetTracks();
|
||||
_audio_dec = nullptr;
|
||||
_audio_enc = nullptr;
|
||||
if (_count) {
|
||||
InfoL << "stop transcode with " << _count << " items";
|
||||
_count = 0;
|
||||
}
|
||||
}
|
||||
|
||||
bool RtcMediaSourceImp::addTrack(const Track::Ptr &track)
|
||||
{
|
||||
if (_muxer) {
|
||||
Track::Ptr newTrack = track;
|
||||
if (_option.audio_transcode && needTransToAac(track->getCodecId())) {
|
||||
newTrack.reset(new AACTrack(44100, 2));
|
||||
GET_CONFIG(int, bitrate, General::kAacBitrate);
|
||||
newTrack->setBitRate(bitrate);
|
||||
_audio_dec.reset(new FFmpegDecoder(track));
|
||||
_audio_enc.reset(new FFmpegEncoder(newTrack));
|
||||
// hook data to newTack
|
||||
track->addDelegate([this](const Frame::Ptr &frame) -> bool {
|
||||
if (_all_track_ready && 0 == _muxer->totalReaderCount()) {
|
||||
if (_count) {
|
||||
InfoL << "stop transcode with " << _count << " items";
|
||||
_count = 0;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
if (_audio_dec) {
|
||||
if (!_count)
|
||||
InfoL << "start transcode " << frame->getCodecName() << "," << frame->pts() << "->AAC";
|
||||
_count++;
|
||||
_audio_dec->inputFrame(frame, true, false);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
_audio_dec->setOnDecode([this](const FFmpegFrame::Ptr & frame) {
|
||||
_audio_enc->inputFrame(frame, false);
|
||||
});
|
||||
_audio_enc->setOnEncode([newTrack](const Frame::Ptr& frame) {
|
||||
newTrack->inputFrame(frame);
|
||||
});
|
||||
}
|
||||
|
||||
if (_muxer->addTrack(newTrack)) {
|
||||
newTrack->addDelegate(_muxer);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void RtcMediaSourceImp::resetTracks()
|
||||
{
|
||||
RtspMediaSourceImp::resetTracks();
|
||||
_audio_dec = nullptr;
|
||||
_audio_enc = nullptr;
|
||||
if (_count) {
|
||||
InfoL << "stop transcode with " << _count << " items";
|
||||
_count = 0;
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
}
|

webrtc/RtcMediaSource.h (new file, 60 lines)
@@ -0,0 +1,60 @@
#ifndef ZLMEDIAKIT_RTCMEDIASOURCE_H
#define ZLMEDIAKIT_RTCMEDIASOURCE_H

#include "Rtsp/RtspMediaSourceMuxer.h"
#include "Rtsp/RtspMediaSourceImp.h"
namespace mediakit {
class FFmpegDecoder;
class FFmpegEncoder;
bool needTransToOpus(CodecId codec);
bool needTransToAac(CodecId codec);

class RtcMediaSourceImp : public RtspMediaSourceImp {
public:
    typedef std::shared_ptr<RtcMediaSourceImp> Ptr;

    RtcMediaSourceImp(const MediaTuple& tuple, int ringSize = RTP_GOP_SIZE)
        : RtspMediaSourceImp(tuple, RTC_SCHEMA, ringSize) {
    }

    RtspMediaSource::Ptr clone(const std::string &stream);
#if defined(ENABLE_FFMPEG)
    ~RtcMediaSourceImp() override { resetTracks(); }
    /**
     * Track-added event triggered by _demuxer
     */
    bool addTrack(const Track::Ptr &track) override;
    void resetTracks() override;
private:
    int _count = 0;
    std::shared_ptr<FFmpegDecoder> _audio_dec;
    std::shared_ptr<FFmpegEncoder> _audio_enc;
#endif
};

class RtcMediaSourceMuxer : public RtspMediaSourceMuxer {
public:
    typedef std::shared_ptr<RtcMediaSourceMuxer> Ptr;

    RtcMediaSourceMuxer( const MediaTuple& tuple,
                         const ProtocolOption &option,
                         const TitleSdp::Ptr &title = nullptr);


    bool inputFrame(const Frame::Ptr &frame) override;

#if defined(ENABLE_FFMPEG)
    ~RtcMediaSourceMuxer() override{resetTracks();}

    bool addTrack(const Track::Ptr & track) override;
    void resetTracks() override;

private:
    int _count = 0;
    std::shared_ptr<FFmpegDecoder> _audio_dec;
    std::shared_ptr<FFmpegEncoder> _audio_enc;
#endif
};

}
#endif
@@ -10,7 +10,7 @@

#include "WebRtcPusher.h"
#include "Common/config.h"
#include "Rtsp/RtspMediaSourceImp.h"
#include "RtcMediaSource.h"

using namespace std;

@@ -18,7 +18,7 @@
#include "Rtsp/Rtsp.h"
#include "Rtsp/RtpReceiver.h"
#include "WebRtcTransport.h"

#include "RtcMediaSource.h"
#include "WebRtcEchoTest.h"
#include "WebRtcPlayer.h"
#include "WebRtcPusher.h"
@ -45,6 +45,9 @@ const string kTimeOutSec = RTC_FIELD "timeoutSec";
|
||||
const string kExternIP = RTC_FIELD "externIP";
|
||||
// 设置remb比特率,非0时关闭twcc并开启remb。该设置在rtc推流时有效,可以控制推流画质
|
||||
const string kRembBitRate = RTC_FIELD "rembBitRate";
|
||||
// 是否转码G711音频,做到: 出rtc将g711转成aac,入rtc将g711转成opus
|
||||
const string kTranscodeG711 = RTC_FIELD "transcodeG711";
|
||||
|
||||
// webrtc单端口udp服务器
|
||||
const string kPort = RTC_FIELD "port";
|
||||
|
||||
@@ -56,6 +59,7 @@ static onceToken token([]() {
    mINI::Instance()[kRembBitRate] = 0;
    mINI::Instance()[kPort] = 0;
    mINI::Instance()[kTcpPort] = 0;
    mINI::Instance()[kTranscodeG711] = 0;
});

} // namespace RTC
@ -1206,7 +1210,7 @@ void push_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
|
||||
}
|
||||
|
||||
if (!push_src) {
|
||||
push_src = std::make_shared<RtspMediaSourceImp>(info, schema);
|
||||
push_src = std::make_shared<RtcMediaSourceImp>(info);
|
||||
push_src_ownership = push_src->getOwnership();
|
||||
push_src->setProtocolOption(option);
|
||||
}
|
||||
|
@@ -33,6 +33,7 @@ namespace Rtc {
extern const std::string kPort;
extern const std::string kTcpPort;
extern const std::string kTimeOutSec;
extern const std::string kTranscodeG711;
}//namespace RTC

class WebRtcInterface {