Mirror of https://github.com/ZLMediaKit/ZLMediaKit.git (synced 2024-11-22 19:00:01 +08:00)

Commit 949a525bf0: Merge branch 'master' into dev

@@ -1 +1 @@
Subproject commit 8bc32a516b279414f749d0dead8bdc2837d3c527
Subproject commit 527c0f5117b489fda78fcd123d446370ddd9ec9a
@@ -532,3 +532,9 @@ endif ()
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/www" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/conf/config.ini" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/default.pem" DESTINATION ${EXECUTABLE_OUTPUT_PATH})

# Copy the default background image used by VideoStack when there is no video stream
if (ENABLE_FFMPEG AND ENABLE_X264)
    file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/conf/novideo.yuv" DESTINATION ${EXECUTABLE_OUTPUT_PATH})
endif ()
@@ -33,12 +33,11 @@ static TcpServer::Ptr shell_server;

#ifdef ENABLE_RTPPROXY
#include "Rtp/RtpServer.h"
static std::shared_ptr<RtpServer> rtpServer;
static RtpServer::Ptr rtpServer;
#endif

#ifdef ENABLE_WEBRTC
#include "../webrtc/WebRtcSession.h"
#include "../webrtc/WebRtcTransport.h"
static UdpServer::Ptr rtcServer_udp;
static TcpServer::Ptr rtcServer_tcp;
#endif

@@ -277,6 +277,8 @@ sampleMS=500
fastStart=0
#Whether MP4 on-demand playback (rtsp/rtmp/http-flv/ws-flv) loops the file
fileRepeat=0
#Whether MP4 recordings are written in fmp4 format; if enabled, files whose recording was interrupted (e.g. by a power failure) can still be opened
enableFmp4=0

[rtmp]
#RTMP must complete its handshake within this time, otherwise the server closes the connection, in seconds
@@ -332,6 +334,9 @@ gop_cache=1
#Audio duration of each packet when sending GB28181 G.711 RTP; default is 100 ms, valid range 20-180 ms (per GB28181-2016, C.2.4),
#preferably a multiple of 20; the program automatically rounds to a multiple of 20
rtp_g711_dur_ms = 100
#UDP receive socket buffer size
#4*1024*1024=4194304
udp_recv_socket_buffer=4194304

[rtc]
#RTC push/play timeout
conf/novideo.yuv (new file, 1 line)
File diff suppressed because one or more lines are too long
@@ -38,16 +38,12 @@ bool G711RtpEncoder::inputFrame(const Frame::Ptr &frame) {
    auto ptr = _cache_frame->data() + _cache_frame->prefixSize();
    auto len = _cache_frame->size() - _cache_frame->prefixSize();
    auto remain_size = len;
    auto max_size = 160 * _channels * _pkt_dur_ms / 20; // 20 ms per 160 byte
    uint32_t n = 0;
    size_t max_size = 160 * _channels * _pkt_dur_ms / 20; // 20 ms per 160 byte
    size_t n = 0;
    bool mark = true;
    while (remain_size >= max_size) {
        size_t rtp_size;
        if (remain_size >= max_size) {
            rtp_size = max_size;
        } else {
            break;
        }
        assert(remain_size >= max_size);
        const size_t rtp_size = max_size;
        n++;
        stamp += _pkt_dur_ms;
        RtpCodec::inputRtp(getRtpInfo().makeRtp(TrackAudio, ptr, rtp_size, mark, stamp), true);
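Note (not part of the patch): the hunk above keeps the encoder's size arithmetic, where G.711 at 8 kHz yields 160 payload bytes per channel per 20 ms. A minimal standalone sketch of that arithmetic, with an illustrative helper name:

```cpp
#include <cassert>
#include <cstddef>

// G.711 at 8 kHz produces 8 bytes per millisecond per channel,
// i.e. 160 bytes per 20 ms, which is what max_size encodes above.
static size_t g711_packet_bytes(size_t channels, size_t pkt_dur_ms) {
    return 160 * channels * pkt_dur_ms / 20;
}

int main() {
    assert(g711_packet_bytes(1, 100) == 800); // 100 ms mono  -> 800 bytes per RTP packet
    assert(g711_packet_bytes(2, 20) == 320);  // 20 ms stereo -> 320 bytes per RTP packet
    return 0;
}
```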
@@ -31,7 +31,9 @@ if(PKG_CONFIG_FOUND)
list(APPEND LINK_LIBRARIES PkgConfig::SDL2)
message(STATUS "found library: ${SDL2_LIBRARIES}")
endif()
else()
endif()

if(NOT SDL2_FOUND)
find_package(SDL2 QUIET)
if(SDL2_FOUND)
include_directories(SYSTEM ${SDL2_INCLUDE_DIR})
@@ -1,11 +1,10 @@
{
"info": {
"_postman_id": "08e3bc35-5318-4949-81bb-90d854706194",
"_postman_id": "8b3cdc62-3e18-4700-9ddd-dc9f58ebce83",
"name": "ZLMediaKit",
"description": "媒体服务器",
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json",
"_exporter_id": "29185956",
"_collection_link": "https://lively-station-598157.postman.co/workspace/%E6%B5%81%E5%AA%92%E4%BD%93%E6%9C%8D%E5%8A%A1~1e119172-45b0-4ed6-b1fc-8a15d0e2d5f8/collection/29185956-08e3bc35-5318-4949-81bb-90d854706194?action=share&source=collection_link&creator=29185956"
"_exporter_id": "26338564"
},
"item": [
{
@@ -34,6 +33,72 @@
},
"response": []
},
{
"name": "关闭多屏拼接(stack/stop)",
"request": {
"method": "GET",
"header": [],
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/getApiList?secret={{ZLMediaKit_secret}}&id=stack_test",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"getApiList"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
},
{
"key": "id",
"value": "stack_test"
}
]
}
},
"response": []
},
{
"name": "添加多屏拼接(stack/start)",
"request": {
"method": "POST",
"header": [],
"body": {
"mode": "raw",
"raw": "{\r\n \"gapv\": 0.002,\r\n \"gaph\": 0.001,\r\n \"width\": 1920,\r\n \"url\": [\r\n [\r\n \"rtsp://kkem.me/live/test3\",\r\n \"rtsp://kkem.me/live/cy1\",\r\n \"rtsp://kkem.me/live/cy1\",\r\n \"rtsp://kkem.me/live/cy2\"\r\n ],\r\n [\r\n \"rtsp://kkem.me/live/cy1\",\r\n \"rtsp://kkem.me/live/cy5\",\r\n \"rtsp://kkem.me/live/cy3\",\r\n \"rtsp://kkem.me/live/cy4\"\r\n ],\r\n [\r\n \"rtsp://kkem.me/live/cy5\",\r\n \"rtsp://kkem.me/live/cy6\",\r\n \"rtsp://kkem.me/live/cy7\",\r\n \"rtsp://kkem.me/live/cy8\"\r\n ],\r\n [\r\n \"rtsp://kkem.me/live/cy9\",\r\n \"rtsp://kkem.me/live/cy10\",\r\n \"rtsp://kkem.me/live/cy11\",\r\n \"rtsp://kkem.me/live/cy12\"\r\n ]\r\n ],\r\n \"id\": \"89\",\r\n \"row\": 4,\r\n \"col\": 4,\r\n \"height\": 1080,\r\n \"span\": [\r\n [\r\n [\r\n 0,\r\n 0\r\n ],\r\n [\r\n 1,\r\n 1\r\n ]\r\n ],\r\n [\r\n [\r\n 3,\r\n 0\r\n ],\r\n [\r\n 3,\r\n 1\r\n ]\r\n ],\r\n [\r\n [\r\n 2,\r\n 3\r\n ],\r\n [\r\n 3,\r\n 3\r\n ]\r\n ]\r\n ]\r\n}",
"options": {
"raw": {
"language": "json"
}
}
},
"url": {
"raw": "{{ZLMediaKit_URL}}/index/api/stack/start?secret={{ZLMediaKit_secret}}",
"host": [
"{{ZLMediaKit_URL}}"
],
"path": [
"index",
"api",
"stack",
"start"
],
"query": [
{
"key": "secret",
"value": "{{ZLMediaKit_secret}}",
"description": "api操作密钥(配置文件配置)"
}
]
}
},
"response": []
},
{
"name": "获取网络线程负载(getThreadsLoad)",
"request": {
@@ -1470,9 +1535,9 @@
"disabled": true
},
{
"key": "only_audio",
"key": "only_track",
"value": "1",
"description": "是否为单音频track,用于语音对讲",
"description": "是否为单音频/单视频track,0:不设置,1:单音频,2:单视频",
"disabled": true
},
{
@@ -1523,9 +1588,9 @@
"description": "该端口绑定的流id\n"
},
{
"key": "only_audio",
"key": "only_track",
"value": "0",
"description": "是否为单音频track,用于语音对讲",
"description": "是否为单音频/单视频track,0:不设置,1:单音频,2:单视频",
"disabled": true
},
{
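For readers following the new stack/start request above, here is a hedged sketch of how a client could assemble the same request body with jsoncpp (which ZLMediaKit already uses); the stream URLs and the 2x2 layout are placeholders, not values from the collection:

```cpp
#include "json/json.h"
#include <string>

// Build a minimal 2x2 stack/start body: id, output width/height, grid
// row/col, gapv/gaph as fractions of the output size, and url as a
// row-major grid of input streams.
static std::string makeStackStartBody() {
    Json::Value body;
    body["id"] = "demo_stack"; // becomes the output stream id under /live/
    body["width"] = 1920;
    body["height"] = 1080;
    body["row"] = 2;
    body["col"] = 2;
    body["gapv"] = 0.002;
    body["gaph"] = 0.001;
    for (int r = 0; r < 2; r++) {
        for (int c = 0; c < 2; c++) {
            body["url"][r][c] = "rtsp://127.0.0.1/live/cam" + std::to_string(r * 2 + c);
        }
    }
    return body.toStyledString();
}
```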
server/VideoStack.cpp (new file, 590 lines)
@@ -0,0 +1,590 @@
#if defined(ENABLE_X264) && defined(ENABLE_FFMPEG)
#include "VideoStack.h"
#include "Codec/Transcode.h"
#include "Common/Device.h"
#include "Util/logger.h"
#include "Util/util.h"
#include "json/value.h"
#include <Thread/WorkThreadPool.h>
#include <fstream>
#include <libavutil/pixfmt.h>
#include <memory>
#include <mutex>

// ITU-R BT.601
// #define RGB_TO_Y(R, G, B) ((( 66 * (R) + 129 * (G) + 25 * (B)+128) >> 8)+16)
// #define RGB_TO_U(R, G, B) (((-38 * (R) - 74 * (G) + 112 * (B)+128) >> 8)+128)
// #define RGB_TO_V(R, G, B) (((112 * (R) - 94 * (G) - 18 * (B)+128) >> 8)+128)

// ITU-R BT.709
#define RGB_TO_Y(R, G, B) (((47 * (R) + 157 * (G) + 16 * (B) + 128) >> 8) + 16)
#define RGB_TO_U(R, G, B) (((-26 * (R)-87 * (G) + 112 * (B) + 128) >> 8) + 128)
#define RGB_TO_V(R, G, B) (((112 * (R)-102 * (G)-10 * (B) + 128) >> 8) + 128)

INSTANCE_IMP(VideoStackManager)

Param::~Param()
{
    VideoStackManager::Instance().unrefChannel(
        id, width, height, pixfmt);
}

Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt)
    : _id(id)
    , _width(width)
    , _height(height)
    , _pixfmt(pixfmt)
{
    _tmp = std::make_shared<mediakit::FFmpegFrame>();

    _tmp->get()->width = _width;
    _tmp->get()->height = _height;
    _tmp->get()->format = _pixfmt;

    av_frame_get_buffer(_tmp->get(), 32);

    memset(_tmp->get()->data[0], 0, _tmp->get()->linesize[0] * _height);
    memset(_tmp->get()->data[1], 0, _tmp->get()->linesize[1] * _height / 2);
    memset(_tmp->get()->data[2], 0, _tmp->get()->linesize[2] * _height / 2);

    auto frame = VideoStackManager::Instance().getBgImg();
    _sws = std::make_shared<mediakit::FFmpegSws>(_pixfmt, _width, _height);

    _tmp = _sws->inputFrame(frame);
}

void Channel::addParam(const std::weak_ptr<Param>& p)
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    _params.push_back(p);
}

void Channel::onFrame(const mediakit::FFmpegFrame::Ptr& frame)
{
    std::weak_ptr<Channel> weakSelf = shared_from_this();
    // toolkit::WorkThreadPool::Instance().getFirstPoller()->async([weakSelf, frame]() {
    auto self = weakSelf.lock();
    if (!self) {
        return;
    }
    self->_tmp = self->_sws->inputFrame(frame);

    self->forEachParam([self](const Param::Ptr& p) { self->fillBuffer(p); });
    // });
}

void Channel::forEachParam(const std::function<void(const Param::Ptr&)>& func)
{
    for (auto& wp : _params) {
        if (auto sp = wp.lock()) {
            func(sp);
        }
    }
}

void Channel::fillBuffer(const Param::Ptr& p)
{
    if (auto buf = p->weak_buf.lock()) {
        copyData(buf, p);
    }
}

void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p)
{
    switch (p->pixfmt) {
    case AV_PIX_FMT_YUV420P: {
        for (int i = 0; i < p->height; i++) {
            memcpy(buf->get()->data[0] + buf->get()->linesize[0] * (i + p->posY) + p->posX,
                _tmp->get()->data[0] + _tmp->get()->linesize[0] * i,
                _tmp->get()->width);
        }
        // Make sure the last row of UV data is also copied correctly when height is odd
        for (int i = 0; i < (p->height + 1) / 2; i++) {
            // U plane
            memcpy(buf->get()->data[1] + buf->get()->linesize[1] * (i + p->posY / 2) + p->posX / 2,
                _tmp->get()->data[1] + _tmp->get()->linesize[1] * i,
                _tmp->get()->width / 2);

            // V plane
            memcpy(buf->get()->data[2] + buf->get()->linesize[2] * (i + p->posY / 2) + p->posX / 2,
                _tmp->get()->data[2] + _tmp->get()->linesize[2] * i,
                _tmp->get()->width / 2);
        }
        break;
    }
    case AV_PIX_FMT_NV12: {
        // TODO: not implemented yet
        break;
    }

    default:
        WarnL << "No support pixformat: " << av_get_pix_fmt_name(p->pixfmt);
        break;
    }
}
void StackPlayer::addChannel(const std::weak_ptr<Channel>& chn)
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    _channels.push_back(chn);
}

void StackPlayer::play()
{
    auto url = _url;
    // Create the pull-stream player and decoder objects
    _player = std::make_shared<mediakit::MediaPlayer>();
    std::weak_ptr<mediakit::MediaPlayer> weakPlayer = _player;

    std::weak_ptr<StackPlayer> weakSelf = shared_from_this();

    (*_player)[mediakit::Client::kWaitTrackReady] = false;
    (*_player)[mediakit::Client::kRtpType] = mediakit::Rtsp::RTP_TCP;

    _player->setOnPlayResult([weakPlayer, weakSelf, url](const toolkit::SockException& ex) mutable {
        TraceL << "StackPlayer: " << url << " OnPlayResult: " << ex.what();
        auto strongPlayer = weakPlayer.lock();
        if (!strongPlayer) {
            return;
        }
        auto self = weakSelf.lock();
        if (!self) {
            return;
        }

        if (!ex) {
            // Cancel the retry timer
            self->_timer.reset();
            self->_failedCount = 0;

        } else {
            self->onDisconnect();
            self->rePlay(url);
        }

        auto videoTrack = std::dynamic_pointer_cast<mediakit::VideoTrack>(strongPlayer->getTrack(mediakit::TrackVideo, false));
        // auto audioTrack = std::dynamic_pointer_cast<mediakit::AudioTrack>(strongPlayer->getTrack(mediakit::TrackAudio, false));

        if (videoTrack) {
            // TODO: add logic to decide between GPU and CPU decoding
            // auto decoder = std::make_shared<FFmpegDecoder>(videoTrack, 1, std::vector<std::string>{ "hevc_cuvid", "h264_cuvid"});
            auto decoder = std::make_shared<mediakit::FFmpegDecoder>(videoTrack, 0, std::vector<std::string> { "h264", "hevc" });

            decoder->setOnDecode([weakSelf](const mediakit::FFmpegFrame::Ptr& frame) mutable {
                auto self = weakSelf.lock();
                if (!self) {
                    return;
                }

                self->onFrame(frame);
            });

            videoTrack->addDelegate((std::function<bool(const mediakit::Frame::Ptr&)>)[decoder](const mediakit::Frame::Ptr& frame) {
                return decoder->inputFrame(frame, false, true);
            });
        }
    });

    _player->setOnShutdown([weakPlayer, url, weakSelf](const toolkit::SockException& ex) {
        TraceL << "StackPlayer: " << url << " OnShutdown: " << ex.what();
        auto strongPlayer = weakPlayer.lock();
        if (!strongPlayer) {
            return;
        }

        auto self = weakSelf.lock();
        if (!self) {
            return;
        }

        self->onDisconnect();

        self->rePlay(url);
    });

    _player->play(url);
}

void StackPlayer::onFrame(const mediakit::FFmpegFrame::Ptr& frame)
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    for (auto& weak_chn : _channels) {
        if (auto chn = weak_chn.lock()) {
            chn->onFrame(frame);
        }
    }
}

void StackPlayer::onDisconnect()
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    for (auto& weak_chn : _channels) {
        if (auto chn = weak_chn.lock()) {
            auto frame = VideoStackManager::Instance().getBgImg();
            chn->onFrame(frame);
        }
    }
}

void StackPlayer::rePlay(const std::string& url)
{
    _failedCount++;
    auto delay = MAX(2 * 1000, MIN(_failedCount * 3 * 1000, 60 * 1000)); // stepped retry delay
    std::weak_ptr<StackPlayer> weakSelf = shared_from_this();
    _timer = std::make_shared<toolkit::Timer>(
        delay / 1000.0f, [weakSelf, url]() {
            auto self = weakSelf.lock();
            if (!self) {
                return false; // the empty if-body here was clearly missing its early return; stop the timer when the player is gone
            }
            WarnL << "replay [" << self->_failedCount << "]:" << url;
            self->_player->play(url);
            return false;
        },
        nullptr);
}
VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelFormat pixfmt, float fps, int bitRate)
    : _id(id)
    , _width(width)
    , _height(height)
    , _pixfmt(pixfmt)
    , _fps(fps)
    , _bitRate(bitRate)
{
    _buffer = std::make_shared<mediakit::FFmpegFrame>();

    _buffer->get()->width = _width;
    _buffer->get()->height = _height;
    _buffer->get()->format = _pixfmt;

    av_frame_get_buffer(_buffer->get(), 32);

    _dev = std::make_shared<mediakit::DevChannel>(mediakit::MediaTuple { DEFAULT_VHOST, "live", _id });

    mediakit::VideoInfo info;
    info.codecId = mediakit::CodecH264;
    info.iWidth = _width;
    info.iHeight = _height;
    info.iFrameRate = _fps;
    info.iBitRate = _bitRate;

    _dev->initVideo(info);
    // dev->initAudio(); // TODO: audio
    _dev->addTrackCompleted();

    _isExit = false;
}

VideoStack::~VideoStack()
{
    _isExit = true;
    if (_thread.joinable()) {
        _thread.join();
    }
}

void VideoStack::setParam(const Params& params)
{
    if (_params) {
        for (auto& p : (*_params)) {
            if (!p)
                continue;
            p->weak_buf.reset();
        }
    }

    initBgColor();
    for (auto& p : (*params)) {
        if (!p)
            continue;
        p->weak_buf = _buffer;
        if (auto chn = p->weak_chn.lock()) {
            chn->addParam(p);
            chn->fillBuffer(p);
        }
    }
    _params = params;
}

void VideoStack::start()
{
    _thread = std::thread([&]() {
        uint64_t pts = 0;
        int frameInterval = 1000 / _fps;
        auto lastEncTP = std::chrono::steady_clock::now();
        while (!_isExit) {
            if (std::chrono::steady_clock::now() - lastEncTP > std::chrono::milliseconds(frameInterval)) {
                lastEncTP = std::chrono::steady_clock::now();

                _dev->inputYUV((char**)_buffer->get()->data, _buffer->get()->linesize, pts);
                pts += frameInterval;
            }
        }
    });
}

void VideoStack::initBgColor()
{
    // Fill the background color
    auto R = 20;
    auto G = 20;
    auto B = 20;

    double Y = RGB_TO_Y(R, G, B);
    double U = RGB_TO_U(R, G, B);
    double V = RGB_TO_V(R, G, B);

    memset(_buffer->get()->data[0], Y, _buffer->get()->linesize[0] * _height);
    memset(_buffer->get()->data[1], U, _buffer->get()->linesize[1] * _height / 2);
    memset(_buffer->get()->data[2], V, _buffer->get()->linesize[2] * _height / 2);
}
Channel::Ptr VideoStackManager::getChannel(const std::string& id,
    int width,
    int height,
    AVPixelFormat pixfmt)
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
    auto it = _channelMap.find(key);
    if (it != _channelMap.end()) {
        return it->second->acquire();
    }

    return createChannel(id, width, height, pixfmt);
}

void VideoStackManager::unrefChannel(const std::string& id,
    int width,
    int height,
    AVPixelFormat pixfmt)
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
    auto chn_it = _channelMap.find(key);
    if (chn_it != _channelMap.end() && chn_it->second->dispose()) {
        _channelMap.erase(chn_it);

        auto player_it = _playerMap.find(id);
        if (player_it != _playerMap.end() && player_it->second->dispose()) {
            _playerMap.erase(player_it);
        }
    }
}

int VideoStackManager::startVideoStack(const Json::Value& json)
{
    std::string id;
    int width, height;
    auto params = parseParams(json, id, width, height);

    if (!params) {
        ErrorL << "Videostack parse params failed!";
        return -1;
    }

    auto stack = std::make_shared<VideoStack>(id, width, height);

    for (auto& p : (*params)) {
        if (!p)
            continue;
        p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
    }

    stack->setParam(params);
    stack->start();

    std::lock_guard<std::recursive_mutex> lock(_mx);
    _stackMap[id] = stack;
    return 0;
}

int VideoStackManager::resetVideoStack(const Json::Value& json)
{
    std::string id;
    int width, height;
    auto params = parseParams(json, id, width, height);

    if (!params) {
        return -1;
    }

    VideoStack::Ptr stack;
    {
        std::lock_guard<std::recursive_mutex> lock(_mx);
        auto it = _stackMap.find(id);
        if (it == _stackMap.end()) {
            return -2;
        }
        stack = it->second;
    }

    for (auto& p : (*params)) {
        if (!p)
            continue;
        p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
    }

    stack->setParam(params);
    return 0;
}

int VideoStackManager::stopVideoStack(const std::string& id)
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    auto it = _stackMap.find(id);
    if (it != _stackMap.end()) {
        _stackMap.erase(it);
        return 0;
    }
    return -1;
}

mediakit::FFmpegFrame::Ptr VideoStackManager::getBgImg()
{
    return _bgImg;
}

Params VideoStackManager::parseParams(const Json::Value& json,
    std::string& id,
    int& width,
    int& height)
{
    try {
        id = json["id"].asString();

        width = json["width"].asInt();
        height = json["height"].asInt();

        int rows = json["row"].asInt(); // number of grid rows
        int cols = json["col"].asInt(); // number of grid columns
        float gapv = json["gapv"].asFloat(); // vertical gap
        float gaph = json["gaph"].asFloat(); // horizontal gap

        // Gap sizes in pixels
        int gaphPix = static_cast<int>(std::round(width * gaph));
        int gapvPix = static_cast<int>(std::round(height * gapv));

        // Compute the cell width/height, accounting for the gaps
        int gridWidth = cols > 1 ? (width - gaphPix * (cols - 1)) / cols : width;
        int gridHeight = rows > 1 ? (height - gapvPix * (rows - 1)) / rows : height;

        auto params = std::make_shared<std::vector<Param::Ptr>>(rows * cols);

        for (int row = 0; row < rows; row++) {
            for (int col = 0; col < cols; col++) {
                std::string url = json["url"][row][col].asString();

                auto param = std::make_shared<Param>();
                param->posX = gridWidth * col + col * gaphPix;
                param->posY = gridHeight * row + row * gapvPix;
                param->width = gridWidth;
                param->height = gridHeight;
                param->id = url;

                (*params)[row * cols + col] = param;
            }
        }

        // Check whether cells need to be merged (focus view)
        if (!json["span"].empty() && json.isMember("span")) {
            for (const auto& subArray : json["span"]) {
                if (!subArray.isArray() || subArray.size() != 2) {
                    throw Json::LogicError("Incorrect 'span' sub-array format in JSON");
                }
                std::array<int, 4> mergePos;
                int index = 0;

                for (const auto& innerArray : subArray) {
                    if (!innerArray.isArray() || innerArray.size() != 2) {
                        throw Json::LogicError("Incorrect 'span' inner-array format in JSON");
                    }
                    for (const auto& number : innerArray) {
                        if (index < mergePos.size()) {
                            mergePos[index++] = number.asInt();
                        }
                    }
                }

                for (int i = mergePos[0]; i <= mergePos[2]; i++) {
                    for (int j = mergePos[1]; j <= mergePos[3]; j++) {
                        if (i == mergePos[0] && j == mergePos[1]) {
                            (*params)[i * cols + j]->width = (mergePos[3] - mergePos[1] + 1) * gridWidth + (mergePos[3] - mergePos[1]) * gapvPix;
                            (*params)[i * cols + j]->height = (mergePos[2] - mergePos[0] + 1) * gridHeight + (mergePos[2] - mergePos[0]) * gaphPix;
                        } else {
                            (*params)[i * cols + j] = nullptr;
                        }
                    }
                }
            }
        }
        return params;
    } catch (const std::exception& e) {
        ErrorL << "Videostack parse params failed! " << e.what();
        return nullptr;
    }
}

bool VideoStackManager::loadBgImg(const std::string& path)
{
    _bgImg = std::make_shared<mediakit::FFmpegFrame>();

    _bgImg->get()->width = 1280;
    _bgImg->get()->height = 720;
    _bgImg->get()->format = AV_PIX_FMT_YUV420P;

    av_frame_get_buffer(_bgImg->get(), 32);

    std::ifstream file(path, std::ios::binary);
    if (!file.is_open()) {
        return false;
    }

    file.read((char*)_bgImg->get()->data[0], _bgImg->get()->linesize[0] * _bgImg->get()->height); // Y
    file.read((char*)_bgImg->get()->data[1], _bgImg->get()->linesize[1] * _bgImg->get()->height / 2); // U
    file.read((char*)_bgImg->get()->data[2], _bgImg->get()->linesize[2] * _bgImg->get()->height / 2); // V
    return true;
}

Channel::Ptr VideoStackManager::createChannel(const std::string& id,
    int width,
    int height,
    AVPixelFormat pixfmt)
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    StackPlayer::Ptr player;
    auto it = _playerMap.find(id);
    if (it != _playerMap.end()) {
        player = it->second->acquire();
    } else {
        player = createPlayer(id);
    }

    auto refChn = std::make_shared<RefWrapper<Channel::Ptr>>(std::make_shared<Channel>(id, width, height, pixfmt));
    auto chn = refChn->acquire();
    player->addChannel(chn);

    _channelMap[id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt)] = refChn;
    return chn;
}

StackPlayer::Ptr VideoStackManager::createPlayer(const std::string& id)
{
    std::lock_guard<std::recursive_mutex> lock(_mx);
    auto refPlayer = std::make_shared<RefWrapper<StackPlayer::Ptr>>(std::make_shared<StackPlayer>(id));
    _playerMap[id] = refPlayer;

    auto player = refPlayer->acquire();
    if (!id.empty()) {
        player->play();
    }

    return player;
}
#endif
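A note on Channel::copyData() above: YUV420P subsamples the chroma planes by two in each direction, so the U/V destination offsets use posX/2 and posY/2 while the luma plane uses the full offsets. A self-contained sketch of the per-plane copy (illustrative helper, not part of the new file):

```cpp
#include <cstdint>
#include <cstring>

// Copy one plane of a tile into the larger composite frame. For the luma
// plane, call it with the full posX/posY and width/height; for the U and V
// planes of YUV420P, halve all four values, which is exactly what
// Channel::copyData() does for data[1] and data[2].
static void copy_plane(uint8_t* dst, int dst_stride, const uint8_t* src, int src_stride,
                       int width, int height, int posX, int posY) {
    for (int i = 0; i < height; i++) {
        std::memcpy(dst + dst_stride * (i + posY) + posX, src + src_stride * i, width);
    }
}
```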
server/VideoStack.h (new file, 207 lines)
@@ -0,0 +1,207 @@
#pragma once
#if defined(ENABLE_X264) && defined(ENABLE_FFMPEG)
#include "Codec/Transcode.h"
#include "Common/Device.h"
#include "Player/MediaPlayer.h"
#include "json/json.h"
#include <mutex>

template <typename T>
class RefWrapper {
public:
    using Ptr = std::shared_ptr<RefWrapper<T>>;

    template <typename... Args>
    explicit RefWrapper(Args&&... args)
        : _rc(0)
        , _entity(std::forward<Args>(args)...)
    {
    }

    T acquire()
    {
        ++_rc;
        return _entity;
    }

    bool dispose() { return --_rc <= 0; }

private:
    T _entity;
    std::atomic<int> _rc;
};

class Channel;

struct Param {
    using Ptr = std::shared_ptr<Param>;

    int posX = 0;
    int posY = 0;
    int width = 0;
    int height = 0;
    AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P;
    std::string id {};

    // runtime
    std::weak_ptr<Channel> weak_chn;
    std::weak_ptr<mediakit::FFmpegFrame> weak_buf;

    ~Param();
};

using Params = std::shared_ptr<std::vector<Param::Ptr>>;

class Channel : public std::enable_shared_from_this<Channel> {
public:
    using Ptr = std::shared_ptr<Channel>;

    Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt);

    void addParam(const std::weak_ptr<Param>& p);

    void onFrame(const mediakit::FFmpegFrame::Ptr& frame);

    void fillBuffer(const Param::Ptr& p);

protected:
    void forEachParam(const std::function<void(const Param::Ptr&)>& func);

    void copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p);

private:
    std::string _id;
    int _width;
    int _height;
    AVPixelFormat _pixfmt;

    mediakit::FFmpegFrame::Ptr _tmp;

    std::recursive_mutex _mx;
    std::vector<std::weak_ptr<Param>> _params;

    mediakit::FFmpegSws::Ptr _sws;
};

class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
public:
    using Ptr = std::shared_ptr<StackPlayer>;

    StackPlayer(const std::string& url)
        : _url(url)
    {
    }

    void addChannel(const std::weak_ptr<Channel>& chn);

    void play();

    void onFrame(const mediakit::FFmpegFrame::Ptr& frame);

    void onDisconnect();

protected:
    void rePlay(const std::string& url);

private:
    std::string _url;
    mediakit::MediaPlayer::Ptr _player;

    // Used for reconnecting after a disconnect
    toolkit::Timer::Ptr _timer;
    int _failedCount = 0;

    std::recursive_mutex _mx;
    std::vector<std::weak_ptr<Channel>> _channels;
};

class VideoStack {
public:
    using Ptr = std::shared_ptr<VideoStack>;

    VideoStack(const std::string& url,
        int width = 1920,
        int height = 1080,
        AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P,
        float fps = 25.0,
        int bitRate = 2 * 1024 * 1024);

    ~VideoStack();

    void setParam(const Params& params);

    void start();

protected:
    void initBgColor();

public:
    Params _params;

    mediakit::FFmpegFrame::Ptr _buffer;

private:
    std::string _id;
    int _width;
    int _height;
    AVPixelFormat _pixfmt;
    float _fps;
    int _bitRate;

    mediakit::DevChannel::Ptr _dev;

    bool _isExit;

    std::thread _thread;
};

class VideoStackManager {
public:
    static VideoStackManager& Instance();

    Channel::Ptr getChannel(const std::string& id,
        int width,
        int height,
        AVPixelFormat pixfmt);

    void unrefChannel(const std::string& id,
        int width,
        int height,
        AVPixelFormat pixfmt);

    int startVideoStack(const Json::Value& json);

    int resetVideoStack(const Json::Value& json);

    int stopVideoStack(const std::string& id);

    bool loadBgImg(const std::string& path);

    mediakit::FFmpegFrame::Ptr getBgImg();

protected:
    Params parseParams(const Json::Value& json,
        std::string& id,
        int& width,
        int& height);

protected:
    Channel::Ptr createChannel(const std::string& id,
        int width,
        int height,
        AVPixelFormat pixfmt);

    StackPlayer::Ptr createPlayer(const std::string& id);

private:
    mediakit::FFmpegFrame::Ptr _bgImg;

private:
    std::recursive_mutex _mx;

    std::unordered_map<std::string, VideoStack::Ptr> _stackMap;

    std::unordered_map<std::string, RefWrapper<Channel::Ptr>::Ptr> _channelMap;

    std::unordered_map<std::string, RefWrapper<StackPlayer::Ptr>::Ptr> _playerMap;
};
#endif
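For orientation, a hedged sketch of how the RefWrapper template above is intended to be used: acquire() increments the reference count and returns the wrapped entity, dispose() decrements it and reports when the last user is gone, and the owner erases the wrapper at that point (the cache type below is illustrative, mirroring _channelMap/_playerMap in VideoStackManager):

```cpp
#include <memory>
#include <string>
#include <unordered_map>

// Illustrative only: a cache of shared entities keyed by id.
template <typename Entity>
class SharedCache {
public:
    std::shared_ptr<Entity> get(const std::string& id) {
        auto it = _cache.find(id);
        if (it != _cache.end()) {
            return it->second->acquire(); // reference count +1
        }
        auto ref = std::make_shared<RefWrapper<std::shared_ptr<Entity>>>(std::make_shared<Entity>());
        _cache[id] = ref;
        return ref->acquire();
    }

    void unref(const std::string& id) {
        auto it = _cache.find(id);
        if (it != _cache.end() && it->second->dispose()) { // reference count -1
            _cache.erase(it);                              // last user gone, drop the entity
        }
    }

private:
    std::unordered_map<std::string, typename RefWrapper<std::shared_ptr<Entity>>::Ptr> _cache;
};
```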
@@ -62,6 +62,10 @@
#include "version.h"
#endif

#if defined(ENABLE_X264) && defined (ENABLE_FFMPEG)
#include "VideoStack.h"
#endif

using namespace std;
using namespace Json;
using namespace toolkit;
@@ -297,22 +301,71 @@ static inline void addHttpListener(){
    });
}

template <typename Type>
class ServiceController {
public:
    using Pointer = std::shared_ptr<Type>;
    std::unordered_map<std::string, Pointer> _map;
    mutable std::recursive_mutex _mtx;

    void clear() {
        decltype(_map) copy;
        {
            std::lock_guard<std::recursive_mutex> lck(_mtx);
            copy.swap(_map);
        }
    }

    size_t erase(const std::string &key) {
        std::lock_guard<std::recursive_mutex> lck(_mtx);
        return _map.erase(key);
    }

    Pointer find(const std::string &key) const {
        std::lock_guard<std::recursive_mutex> lck(_mtx);
        auto it = _map.find(key);
        if (it == _map.end()) {
            return nullptr;
        }
        return it->second;
    }

    template<class ..._Args>
    Pointer make(const std::string &key, _Args&& ...__args) {
        // assert(!find(key));

        auto server = std::make_shared<Type>(std::forward<_Args>(__args)...);
        std::lock_guard<std::recursive_mutex> lck(_mtx);
        auto it = _map.emplace(key, server);
        assert(it.second);
        return server;
    }

    template<class ..._Args>
    Pointer makeWithAction(const std::string &key, function<void(Pointer)> action, _Args&& ...__args) {
        // assert(!find(key));

        auto server = std::make_shared<Type>(std::forward<_Args>(__args)...);
        action(server);
        std::lock_guard<std::recursive_mutex> lck(_mtx);
        auto it = _map.emplace(key, server);
        assert(it.second);
        return server;
    }
};
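A hedged usage sketch of the ServiceController helper introduced above, showing how it replaces the previous unordered_map plus recursive_mutex pairs; the controller name and callback below are illustrative:

```cpp
// Illustrative only: one ServiceController guards construction, lookup and removal.
static ServiceController<FFmpegSource> s_demo_src;

static void demoRegister(const std::string &key) {
    if (s_demo_src.find(key)) {
        return;                      // already registered
    }
    auto src = s_demo_src.make(key); // construct and insert under the internal mutex
    src->setOnClose([key]() {
        s_demo_src.erase(key);       // drop the entry when the source shuts down
    });
}
```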

// List of pull stream proxies
static unordered_map<string, PlayerProxy::Ptr> s_proxyMap;
static recursive_mutex s_proxyMapMtx;
static ServiceController<PlayerProxy> s_player_proxy;

// List of push stream proxies
static unordered_map<string, PusherProxy::Ptr> s_proxyPusherMap;
static recursive_mutex s_proxyPusherMapMtx;
static ServiceController<PusherProxy> s_pusher_proxy;

// List of FFmpeg pull stream proxies
static unordered_map<string, FFmpegSource::Ptr> s_ffmpegMap;
static recursive_mutex s_ffmpegMapMtx;
static ServiceController<FFmpegSource> s_ffmpeg_src;

#if defined(ENABLE_RTPPROXY)
// List of RTP servers
static unordered_map<string, RtpServer::Ptr> s_rtpServerMap;
static recursive_mutex s_rtpServerMapMtx;
static ServiceController<RtpServer> s_rtp_server;
#endif

static inline string getProxyKey(const string &vhost, const string &app, const string &stream) {
@@ -415,47 +468,24 @@ Value makeMediaSourceJson(MediaSource &media){
}

#if defined(ENABLE_RTPPROXY)
uint16_t openRtpServer(uint16_t local_port, const string &stream_id, int tcp_mode, const string &local_ip, bool re_use_port, uint32_t ssrc, bool only_audio, bool multiplex) {
    lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
    if (s_rtpServerMap.find(stream_id) != s_rtpServerMap.end()) {
uint16_t openRtpServer(uint16_t local_port, const string &stream_id, int tcp_mode, const string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
    if (s_rtp_server.find(stream_id)) {
        // To avoid confusing RtpProcess ownership, adding the same stream_id twice is not allowed
        return 0;
    }

    RtpServer::Ptr server = std::make_shared<RtpServer>();
    server->start(local_port, stream_id, (RtpServer::TcpMode)tcp_mode, local_ip.c_str(), re_use_port, ssrc, only_audio, multiplex);
    auto server = s_rtp_server.makeWithAction(stream_id, [&](RtpServer::Ptr server) {
        server->start(local_port, stream_id, (RtpServer::TcpMode)tcp_mode, local_ip.c_str(), re_use_port, ssrc, only_track, multiplex);
    });
    server->setOnDetach([stream_id]() {
        // Remove the server when the RTP stream times out
        lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
        s_rtpServerMap.erase(stream_id);
        s_rtp_server.erase(stream_id);
    });

    // Save the object
    s_rtpServerMap.emplace(stream_id, server);
    // Reply with json
    return server->getPort();
}

void connectRtpServer(const string &stream_id, const string &dst_url, uint16_t dst_port, const function<void(const SockException &ex)> &cb) {
    lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
    auto it = s_rtpServerMap.find(stream_id);
    if (it == s_rtpServerMap.end()) {
        cb(SockException(Err_other, "未找到rtp服务"));
        return;
    }
    it->second->connectToServer(dst_url, dst_port, cb);
}

bool closeRtpServer(const string &stream_id) {
    lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
    auto it = s_rtpServerMap.find(stream_id);
    if (it == s_rtpServerMap.end()) {
        return false;
    }
    auto server = it->second;
    s_rtpServerMap.erase(it);
    return true;
}
#endif

void getStatisticJson(const function<void(Value &val)> &cb) {
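Note on the signature change above: openRtpServer() now takes an int only_track instead of bool only_audio. Based on the handlers later in this commit and the updated Postman descriptions, the mapping is 0 = not set, 1 = audio-only, 2 = video-only, with the legacy only_audio flag folded in for old clients; a hedged sketch of that compatibility shim:

```cpp
// Illustrative only: translate the legacy only_audio flag into only_track
// before calling openRtpServer(), as the /index/api/openRtpServer handler does.
static int resolveOnlyTrack(int only_track, bool legacy_only_audio) {
    if (legacy_only_audio) {
        only_track = 1; // old clients that sent only_audio=1 mean "audio-only track"
    }
    return only_track;  // 0 = not set, 1 = audio only, 2 = video only
}
```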
@ -546,15 +576,13 @@ void addStreamProxy(const string &vhost, const string &app, const string &stream
|
||||
const ProtocolOption &option, int rtp_type, float timeout_sec, const mINI &args,
|
||||
const function<void(const SockException &ex, const string &key)> &cb) {
|
||||
auto key = getProxyKey(vhost, app, stream);
|
||||
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
|
||||
if (s_proxyMap.find(key) != s_proxyMap.end()) {
|
||||
if (s_player_proxy.find(key)) {
|
||||
//已经在拉流了
|
||||
cb(SockException(Err_other, "This stream already exists"), key);
|
||||
return;
|
||||
}
|
||||
//添加拉流代理
|
||||
auto player = std::make_shared<PlayerProxy>(vhost, app, stream, option, retry_count);
|
||||
s_proxyMap[key] = player;
|
||||
auto player = s_player_proxy.make(key, vhost, app, stream, option, retry_count);
|
||||
|
||||
// 先透传参数
|
||||
player->mINI::operator=(args);
|
||||
@ -562,7 +590,7 @@ void addStreamProxy(const string &vhost, const string &app, const string &stream
|
||||
//指定RTP over TCP(播放rtsp时有效)
|
||||
(*player)[Client::kRtpType] = rtp_type;
|
||||
|
||||
if (timeout_sec > 0.1) {
|
||||
if (timeout_sec > 0.1f) {
|
||||
//播放握手超时时间
|
||||
(*player)[Client::kTimeoutMS] = timeout_sec * 1000;
|
||||
}
|
||||
@ -570,20 +598,68 @@ void addStreamProxy(const string &vhost, const string &app, const string &stream
|
||||
//开始播放,如果播放失败或者播放中止,将会自动重试若干次,默认一直重试
|
||||
player->setPlayCallbackOnce([cb, key](const SockException &ex) {
|
||||
if (ex) {
|
||||
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
|
||||
s_proxyMap.erase(key);
|
||||
s_player_proxy.erase(key);
|
||||
}
|
||||
cb(ex, key);
|
||||
});
|
||||
|
||||
//被主动关闭拉流
|
||||
player->setOnClose([key](const SockException &ex) {
|
||||
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
|
||||
s_proxyMap.erase(key);
|
||||
s_player_proxy.erase(key);
|
||||
});
|
||||
player->play(url);
|
||||
};
|
||||
|
||||
|
||||
void addStreamPusherProxy(const string &schema,
|
||||
const string &vhost,
|
||||
const string &app,
|
||||
const string &stream,
|
||||
const string &url,
|
||||
int retry_count,
|
||||
int rtp_type,
|
||||
float timeout_sec,
|
||||
const function<void(const SockException &ex, const string &key)> &cb) {
|
||||
auto key = getPusherKey(schema, vhost, app, stream, url);
|
||||
auto src = MediaSource::find(schema, vhost, app, stream);
|
||||
if (!src) {
|
||||
cb(SockException(Err_other, "can not find the source stream"), key);
|
||||
return;
|
||||
}
|
||||
if (s_pusher_proxy.find(key)) {
|
||||
//已经在推流了
|
||||
cb(SockException(Err_success), key);
|
||||
return;
|
||||
}
|
||||
|
||||
//添加推流代理
|
||||
auto pusher = s_pusher_proxy.make(key, src, retry_count);
|
||||
|
||||
//指定RTP over TCP(播放rtsp时有效)
|
||||
pusher->emplace(Client::kRtpType, rtp_type);
|
||||
|
||||
if (timeout_sec > 0.1f) {
|
||||
//推流握手超时时间
|
||||
pusher->emplace(Client::kTimeoutMS, timeout_sec * 1000);
|
||||
}
|
||||
|
||||
//开始推流,如果推流失败或者推流中止,将会自动重试若干次,默认一直重试
|
||||
pusher->setPushCallbackOnce([cb, key, url](const SockException &ex) {
|
||||
if (ex) {
|
||||
WarnL << "Push " << url << " failed, key: " << key << ", err: " << ex;
|
||||
s_pusher_proxy.erase(key);
|
||||
}
|
||||
cb(ex, key);
|
||||
});
|
||||
|
||||
//被主动关闭推流
|
||||
pusher->setOnClose([key, url](const SockException &ex) {
|
||||
WarnL << "Push " << url << " failed, key: " << key << ", err: " << ex;
|
||||
s_pusher_proxy.erase(key);
|
||||
});
|
||||
pusher->publish(url);
|
||||
}
|
||||
|
||||
template <typename Type>
|
||||
static void getArgsValue(const HttpAllArgs<ApiArgsType> &allArgs, const string &key, Type &value) {
|
||||
auto val = allArgs[key];
|
||||
@ -973,59 +1049,6 @@ void installWebApi() {
|
||||
val["count_hit"] = (Json::UInt64)count_hit;
|
||||
});
|
||||
|
||||
static auto addStreamPusherProxy = [](const string &schema,
|
||||
const string &vhost,
|
||||
const string &app,
|
||||
const string &stream,
|
||||
const string &url,
|
||||
int retry_count,
|
||||
int rtp_type,
|
||||
float timeout_sec,
|
||||
const function<void(const SockException &ex, const string &key)> &cb) {
|
||||
auto key = getPusherKey(schema, vhost, app, stream, url);
|
||||
auto src = MediaSource::find(schema, vhost, app, stream);
|
||||
if (!src) {
|
||||
cb(SockException(Err_other, "can not find the source stream"), key);
|
||||
return;
|
||||
}
|
||||
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
|
||||
if (s_proxyPusherMap.find(key) != s_proxyPusherMap.end()) {
|
||||
//已经在推流了
|
||||
cb(SockException(Err_success), key);
|
||||
return;
|
||||
}
|
||||
|
||||
//添加推流代理
|
||||
auto pusher = std::make_shared<PusherProxy>(src, retry_count);
|
||||
s_proxyPusherMap[key] = pusher;
|
||||
|
||||
//指定RTP over TCP(播放rtsp时有效)
|
||||
(*pusher)[Client::kRtpType] = rtp_type;
|
||||
|
||||
if (timeout_sec > 0.1) {
|
||||
//推流握手超时时间
|
||||
(*pusher)[Client::kTimeoutMS] = timeout_sec * 1000;
|
||||
}
|
||||
|
||||
//开始推流,如果推流失败或者推流中止,将会自动重试若干次,默认一直重试
|
||||
pusher->setPushCallbackOnce([cb, key, url](const SockException &ex) {
|
||||
if (ex) {
|
||||
WarnL << "Push " << url << " failed, key: " << key << ", err: " << ex;
|
||||
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
|
||||
s_proxyPusherMap.erase(key);
|
||||
}
|
||||
cb(ex, key);
|
||||
});
|
||||
|
||||
//被主动关闭推流
|
||||
pusher->setOnClose([key, url](const SockException &ex) {
|
||||
WarnL << "Push " << url << " failed, key: " << key << ", err: " << ex;
|
||||
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
|
||||
s_proxyPusherMap.erase(key);
|
||||
});
|
||||
pusher->publish(url);
|
||||
};
|
||||
|
||||
//动态添加rtsp/rtmp推流代理
|
||||
//测试url http://127.0.0.1/index/api/addStreamPusherProxy?schema=rtmp&vhost=__defaultVhost__&app=proxy&stream=0&dst_url=rtmp://127.0.0.1/live/obs
|
||||
api_regist("/index/api/addStreamPusherProxy", [](API_ARGS_MAP_ASYNC) {
|
||||
@ -1058,8 +1081,7 @@ void installWebApi() {
|
||||
api_regist("/index/api/delStreamPusherProxy", [](API_ARGS_MAP) {
|
||||
CHECK_SECRET();
|
||||
CHECK_ARGS("key");
|
||||
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
|
||||
val["data"]["flag"] = s_proxyPusherMap.erase(allArgs["key"]) == 1;
|
||||
val["data"]["flag"] = s_pusher_proxy.erase(allArgs["key"]) == 1;
|
||||
});
|
||||
|
||||
//动态添加rtsp/rtmp拉流代理
|
||||
@ -1100,8 +1122,7 @@ void installWebApi() {
|
||||
api_regist("/index/api/delStreamProxy",[](API_ARGS_MAP){
|
||||
CHECK_SECRET();
|
||||
CHECK_ARGS("key");
|
||||
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
|
||||
val["data"]["flag"] = s_proxyMap.erase(allArgs["key"]) == 1;
|
||||
val["data"]["flag"] = s_player_proxy.erase(allArgs["key"]) == 1;
|
||||
});
|
||||
|
||||
static auto addFFmpegSource = [](const string &ffmpeg_cmd_key,
|
||||
@ -1112,25 +1133,21 @@ void installWebApi() {
|
||||
bool enable_mp4,
|
||||
const function<void(const SockException &ex, const string &key)> &cb) {
|
||||
auto key = MD5(dst_url).hexdigest();
|
||||
lock_guard<decltype(s_ffmpegMapMtx)> lck(s_ffmpegMapMtx);
|
||||
if (s_ffmpegMap.find(key) != s_ffmpegMap.end()) {
|
||||
if (s_ffmpeg_src.find(key)) {
|
||||
//已经在拉流了
|
||||
cb(SockException(Err_success), key);
|
||||
return;
|
||||
}
|
||||
|
||||
FFmpegSource::Ptr ffmpeg = std::make_shared<FFmpegSource>();
|
||||
s_ffmpegMap[key] = ffmpeg;
|
||||
auto ffmpeg = s_ffmpeg_src.make(key);
|
||||
|
||||
ffmpeg->setOnClose([key]() {
|
||||
lock_guard<decltype(s_ffmpegMapMtx)> lck(s_ffmpegMapMtx);
|
||||
s_ffmpegMap.erase(key);
|
||||
s_ffmpeg_src.erase(key);
|
||||
});
|
||||
ffmpeg->setupRecordFlag(enable_hls, enable_mp4);
|
||||
ffmpeg->play(ffmpeg_cmd_key, src_url, dst_url, timeout_ms, [cb, key](const SockException &ex) {
|
||||
if (ex) {
|
||||
lock_guard<decltype(s_ffmpegMapMtx)> lck(s_ffmpegMapMtx);
|
||||
s_ffmpegMap.erase(key);
|
||||
s_ffmpeg_src.erase(key);
|
||||
}
|
||||
cb(ex, key);
|
||||
});
|
||||
@ -1164,8 +1181,7 @@ void installWebApi() {
|
||||
api_regist("/index/api/delFFmpegSource",[](API_ARGS_MAP){
|
||||
CHECK_SECRET();
|
||||
CHECK_ARGS("key");
|
||||
lock_guard<decltype(s_ffmpegMapMtx)> lck(s_ffmpegMapMtx);
|
||||
val["data"]["flag"] = s_ffmpegMap.erase(allArgs["key"]) == 1;
|
||||
val["data"]["flag"] = s_ffmpeg_src.erase(allArgs["key"]) == 1;
|
||||
});
|
||||
|
||||
//新增http api下载可执行程序文件接口
|
||||
@ -1198,12 +1214,17 @@ void installWebApi() {
|
||||
//兼容老版本请求,新版本去除enable_tcp参数并新增tcp_mode参数
|
||||
tcp_mode = 1;
|
||||
}
|
||||
auto only_track = allArgs["only_track"].as<int>();
|
||||
if (allArgs["only_audio"].as<bool>()) {
|
||||
// 兼容老版本请求,新版本去除only_audio参数并新增only_track参数
|
||||
only_track = 1;
|
||||
}
|
||||
std::string local_ip = "::";
|
||||
if (!allArgs["local_ip"].empty()) {
|
||||
local_ip = allArgs["local_ip"];
|
||||
}
|
||||
auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, local_ip, allArgs["re_use_port"].as<bool>(),
|
||||
allArgs["ssrc"].as<uint32_t>(), allArgs["only_audio"].as<bool>());
|
||||
allArgs["ssrc"].as<uint32_t>(), only_track);
|
||||
if (port == 0) {
|
||||
throw InvalidArgsException("该stream_id已存在");
|
||||
}
|
||||
@ -1220,11 +1241,16 @@ void installWebApi() {
|
||||
// 兼容老版本请求,新版本去除enable_tcp参数并新增tcp_mode参数
|
||||
tcp_mode = 1;
|
||||
}
|
||||
auto only_track = allArgs["only_track"].as<int>();
|
||||
if (allArgs["only_audio"].as<bool>()) {
|
||||
// 兼容老版本请求,新版本去除only_audio参数并新增only_track参数
|
||||
only_track = 1;
|
||||
}
|
||||
std::string local_ip = "::";
|
||||
if (!allArgs["local_ip"].empty()) {
|
||||
local_ip = allArgs["local_ip"];
|
||||
}
|
||||
auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, local_ip, true, 0, allArgs["only_audio"].as<bool>(),true);
|
||||
auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, local_ip, true, 0, only_track,true);
|
||||
if (port == 0) {
|
||||
throw InvalidArgsException("该stream_id已存在");
|
||||
}
|
||||
@ -1235,22 +1261,27 @@ void installWebApi() {
|
||||
api_regist("/index/api/connectRtpServer", [](API_ARGS_MAP_ASYNC) {
|
||||
CHECK_SECRET();
|
||||
CHECK_ARGS("stream_id", "dst_url", "dst_port");
|
||||
connectRtpServer(
|
||||
allArgs["stream_id"], allArgs["dst_url"], allArgs["dst_port"],
|
||||
[val, headerOut, invoker](const SockException &ex) mutable {
|
||||
if (ex) {
|
||||
val["code"] = API::OtherFailed;
|
||||
val["msg"] = ex.what();
|
||||
}
|
||||
invoker(200, headerOut, val.toStyledString());
|
||||
});
|
||||
auto cb = [val, headerOut, invoker](const SockException &ex) mutable {
|
||||
if (ex) {
|
||||
val["code"] = API::OtherFailed;
|
||||
val["msg"] = ex.what();
|
||||
}
|
||||
invoker(200, headerOut, val.toStyledString());
|
||||
};
|
||||
|
||||
auto server = s_rtp_server.find(allArgs["stream_id"]);
|
||||
if (!server) {
|
||||
cb(SockException(Err_other, "未找到rtp服务"));
|
||||
return;
|
||||
}
|
||||
server->connectToServer(allArgs["dst_url"], allArgs["dst_port"], cb);
|
||||
});
|
||||
|
||||
api_regist("/index/api/closeRtpServer",[](API_ARGS_MAP){
|
||||
CHECK_SECRET();
|
||||
CHECK_ARGS("stream_id");
|
||||
|
||||
if(!closeRtpServer(allArgs["stream_id"])){
|
||||
if(s_rtp_server.erase(allArgs["stream_id"]) == 0){
|
||||
val["hit"] = 0;
|
||||
return;
|
||||
}
|
||||
@ -1261,19 +1292,18 @@ void installWebApi() {
|
||||
CHECK_SECRET();
|
||||
CHECK_ARGS("stream_id", "ssrc");
|
||||
|
||||
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
|
||||
auto it = s_rtpServerMap.find(allArgs["stream_id"]);
|
||||
if (it == s_rtpServerMap.end()) {
|
||||
auto server = s_rtp_server.find(allArgs["stream_id"]);
|
||||
if (!server) {
|
||||
throw ApiRetException("RtpServer not found by stream_id", API::NotFound);
|
||||
}
|
||||
it->second->updateSSRC(allArgs["ssrc"]);
|
||||
server->updateSSRC(allArgs["ssrc"]);
|
||||
});
|
||||
|
||||
api_regist("/index/api/listRtpServer",[](API_ARGS_MAP){
|
||||
CHECK_SECRET();
|
||||
|
||||
lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
|
||||
for (auto &pr : s_rtpServerMap) {
|
||||
std::lock_guard<std::recursive_mutex> lck(s_rtp_server._mtx);
|
||||
for (auto &pr : s_rtp_server._map) {
|
||||
Value obj;
|
||||
obj["stream_id"] = pr.first;
|
||||
obj["port"] = pr.second->getPort();
|
||||
@ -1289,9 +1319,10 @@ void installWebApi() {
|
||||
if (!src) {
|
||||
throw ApiRetException("can not find the source stream", API::NotFound);
|
||||
}
|
||||
auto type = allArgs["type"].as<int>();
|
||||
if (!allArgs["use_ps"].empty()) {
|
||||
// 兼容之前的use_ps参数
|
||||
allArgs["type"] = allArgs["use_ps"].as<int>();
|
||||
type = allArgs["use_ps"].as<int>();
|
||||
}
|
||||
MediaSourceEvent::SendRtpArgs args;
|
||||
args.passive = false;
|
||||
@ -1302,11 +1333,11 @@ void installWebApi() {
|
||||
args.is_udp = allArgs["is_udp"];
|
||||
args.src_port = allArgs["src_port"];
|
||||
args.pt = allArgs["pt"].empty() ? 96 : allArgs["pt"].as<int>();
|
||||
args.type = (MediaSourceEvent::SendRtpArgs::Type)(allArgs["type"].as<int>());
|
||||
args.type = (MediaSourceEvent::SendRtpArgs::Type)type;
|
||||
args.only_audio = allArgs["only_audio"].as<bool>();
|
||||
args.udp_rtcp_timeout = allArgs["udp_rtcp_timeout"];
|
||||
args.recv_stream_id = allArgs["recv_stream_id"];
|
||||
TraceL << "startSendRtp, pt " << int(args.pt) << " rtp type " << args.type << " audio " << args.only_audio;
|
||||
TraceL << "startSendRtp, pt " << int(args.pt) << " rtp type " << type << " audio " << args.only_audio;
|
||||
|
||||
src->getOwnerPoller()->async([=]() mutable {
|
||||
src->startSendRtp(args, [val, headerOut, invoker](uint16_t local_port, const SockException &ex) mutable {
|
||||
@ -1328,10 +1359,10 @@ void installWebApi() {
|
||||
if (!src) {
|
||||
throw ApiRetException("can not find the source stream", API::NotFound);
|
||||
}
|
||||
|
||||
auto type = allArgs["type"].as<int>();
|
||||
if (!allArgs["use_ps"].empty()) {
|
||||
// 兼容之前的use_ps参数
|
||||
allArgs["type"] = allArgs["use_ps"].as<int>();
|
||||
type = allArgs["use_ps"].as<int>();
|
||||
}
|
||||
|
||||
MediaSourceEvent::SendRtpArgs args;
|
||||
@ -1340,12 +1371,12 @@ void installWebApi() {
|
||||
args.is_udp = false;
|
||||
args.src_port = allArgs["src_port"];
|
||||
args.pt = allArgs["pt"].empty() ? 96 : allArgs["pt"].as<int>();
|
||||
args.type = (MediaSourceEvent::SendRtpArgs::Type)(allArgs["type"].as<int>());
|
||||
args.type = (MediaSourceEvent::SendRtpArgs::Type)type;
|
||||
args.only_audio = allArgs["only_audio"].as<bool>();
|
||||
args.recv_stream_id = allArgs["recv_stream_id"];
|
||||
//tcp被动服务器等待链接超时时间
|
||||
args.tcp_passive_close_delay_ms = allArgs["close_delay_ms"];
|
||||
TraceL << "startSendRtpPassive, pt " << int(args.pt) << " rtp type " << args.type << " audio " << args.only_audio;
|
||||
TraceL << "startSendRtpPassive, pt " << int(args.pt) << " rtp type " << type << " audio " << args.only_audio;
|
||||
|
||||
src->getOwnerPoller()->async([=]() mutable {
|
||||
src->startSendRtp(args, [val, headerOut, invoker](uint16_t local_port, const SockException &ex) mutable {
|
||||
@ -1507,18 +1538,11 @@ void installWebApi() {
|
||||
api_regist("/index/api/getProxyPusherInfo", [](API_ARGS_MAP_ASYNC) {
|
||||
CHECK_SECRET();
|
||||
CHECK_ARGS("key");
|
||||
decltype(s_proxyPusherMap.end()) it;
|
||||
{
|
||||
lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
|
||||
it = s_proxyPusherMap.find(allArgs["key"]);
|
||||
}
|
||||
|
||||
if (it == s_proxyPusherMap.end()) {
|
||||
auto pusher = s_pusher_proxy.find(allArgs["key"]);
|
||||
if (!pusher) {
|
||||
throw ApiRetException("can not find pusher", API::NotFound);
|
||||
}
|
||||
|
||||
auto pusher = it->second;
|
||||
|
||||
val["data"]["status"] = pusher->getStatus();
|
||||
val["data"]["liveSecs"] = pusher->getLiveSecs();
|
||||
val["data"]["rePublishCount"] = pusher->getRePublishCount();
|
||||
@ -1528,18 +1552,11 @@ void installWebApi() {
|
||||
api_regist("/index/api/getProxyInfo", [](API_ARGS_MAP_ASYNC) {
|
||||
CHECK_SECRET();
|
||||
CHECK_ARGS("key");
|
||||
decltype(s_proxyMap.end()) it;
|
||||
{
|
||||
lock_guard<recursive_mutex> lck(s_proxyMapMtx);
|
||||
it = s_proxyMap.find(allArgs["key"]);
|
||||
}
|
||||
|
||||
if (it == s_proxyMap.end()) {
|
||||
auto proxy = s_player_proxy.find(allArgs["key"]);
|
||||
if (!proxy) {
|
||||
throw ApiRetException("can not find the proxy", API::NotFound);
|
||||
}
|
||||
|
||||
auto proxy = it->second;
|
||||
|
||||
val["data"]["status"] = proxy->getStatus();
|
||||
val["data"]["liveSecs"] = proxy->getLiveSecs();
|
||||
val["data"]["rePullCount"] = proxy->getRePullCount();
|
||||
@ -1866,7 +1883,7 @@ void installWebApi() {
|
||||
std::set<std::string> ret;
|
||||
auto vec = toolkit::split(str, ";");
|
||||
for (auto &item : vec) {
|
||||
auto root = File::absolutePath(item, "", true);
|
||||
auto root = File::absolutePath("", item, true);
|
||||
ret.emplace(std::move(root));
|
||||
}
|
||||
return ret;
|
||||
@@ -1912,34 +1929,45 @@ void installWebApi() {
            invoker(401, StrCaseMap {}, "None http access event listener");
        }
    });

#if defined(ENABLE_X264) && defined(ENABLE_FFMPEG)
    VideoStackManager::Instance().loadBgImg("novideo.yuv");
    NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastStreamNoneReader, [](BroadcastStreamNoneReaderArgs) {
        auto id = sender.getMediaTuple().stream;
        VideoStackManager::Instance().stopVideoStack(id);
        InfoL << "VideoStack: " << id <<" stop";
    });

    api_regist("/index/api/stack/start", [](API_ARGS_JSON_ASYNC) {
        CHECK_SECRET();
        auto ret = VideoStackManager::Instance().startVideoStack(allArgs.getArgs());
        if (!ret) {
            invoker(200, headerOut, "success");
        } else {
            invoker(200, headerOut, "failed");
        }
    });

    api_regist("/index/api/stack/stop", [](API_ARGS_MAP_ASYNC) {
        CHECK_SECRET();
        CHECK_ARGS("id");
        auto ret = VideoStackManager::Instance().stopVideoStack(allArgs["id"]);
        if (!ret) {
            invoker(200, headerOut, "success");
        } else {
            invoker(200, headerOut, "failed");
        }
    });
#endif
}

void unInstallWebApi(){
    {
        lock_guard<recursive_mutex> lck(s_proxyMapMtx);
        auto proxyMap(std::move(s_proxyMap));
        proxyMap.clear();
    }

    {
        lock_guard<recursive_mutex> lck(s_ffmpegMapMtx);
        auto ffmpegMap(std::move(s_ffmpegMap));
        ffmpegMap.clear();
    }

    {
        lock_guard<recursive_mutex> lck(s_proxyPusherMapMtx);
        auto proxyPusherMap(std::move(s_proxyPusherMap));
        proxyPusherMap.clear();
    }

    {
        s_player_proxy.clear();
        s_ffmpeg_src.clear();
        s_pusher_proxy.clear();
#if defined(ENABLE_RTPPROXY)
        RtpSelector::Instance().clear();
        lock_guard<recursive_mutex> lck(s_rtpServerMapMtx);
        auto rtpServerMap(std::move(s_rtpServerMap));
        rtpServerMap.clear();
        s_rtp_server.clear();
#endif
    }

    NoticeCenter::Instance().delListener(&web_api_tag);
}
@@ -233,7 +233,7 @@ void installWebApi();
void unInstallWebApi();

#if defined(ENABLE_RTPPROXY)
uint16_t openRtpServer(uint16_t local_port, const std::string &stream_id, int tcp_mode, const std::string &local_ip, bool re_use_port, uint32_t ssrc, bool only_audio, bool multiplex=false);
uint16_t openRtpServer(uint16_t local_port, const std::string &stream_id, int tcp_mode, const std::string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex=false);
void connectRtpServer(const std::string &stream_id, const std::string &dst_url, uint16_t dst_port, const std::function<void(const toolkit::SockException &ex)> &cb);
bool closeRtpServer(const std::string &stream_id);
#endif

@@ -392,8 +392,8 @@ int start_main(int argc,char *argv[]) {
#endif//defined(ENABLE_WEBRTC)

#if defined(ENABLE_SRT)
        // SRT UDP server
        if(srtPort) { srtSrv->start<SRT::SrtSession>(srtPort); }
        // SRT UDP server
        if (srtPort) { srtSrv->start<SRT::SrtSession>(srtPort); }
#endif//defined(ENABLE_SRT)

    } catch (std::exception &ex) {
@@ -133,7 +133,7 @@ void MediaSink::checkTrackIfReady() {
    }

    GET_CONFIG(uint32_t, kMaxAddTrackMS, General::kWaitAddTrackMS);
    if (_track_map.size() == 1 && _ticker.elapsedTime() > kMaxAddTrackMS) {
    if (_track_map.size() == 1 && (_ticker.elapsedTime() > kMaxAddTrackMS || !_enable_audio)) {
        // If there is only one track, wait at most a short while after it is added (another track may still arrive)
        emitAllTrackReady();
        return;
@@ -187,6 +187,8 @@ void MediaSink::emitAllTrackReady() {
            pr.second.for_each([&](const Frame::Ptr &frame) { MediaSink::inputFrame(frame); });
        }
        _frame_unread.clear();
    } else {
        throw toolkit::SockException(toolkit::Err_shutdown, "no vaild track data");
    }
}

@ -44,6 +44,7 @@ public:
    }

    void resetTimer(const EventPoller::Ptr &poller) {
        std::lock_guard<std::recursive_mutex> lck(_mtx);
        std::weak_ptr<FramePacedSender> weak_self = shared_from_this();
        _timer = std::make_shared<Timer>(_paced_sender_ms / 1000.0f, [weak_self]() {
            if (auto strong_self = weak_self.lock()) {
@ -55,6 +56,7 @@
    }

    bool inputFrame(const Frame::Ptr &frame) override {
        std::lock_guard<std::recursive_mutex> lck(_mtx);
        if (!_timer) {
            setCurrentStamp(frame->dts());
            resetTimer(EventPoller::getCurrentPoller());
@ -66,6 +68,7 @@

private:
    void onTick() {
        std::lock_guard<std::recursive_mutex> lck(_mtx);
        auto dst = _cache.empty() ? 0 : _cache.back().first;
        while (!_cache.empty()) {
            auto &front = _cache.front();
@ -110,6 +113,7 @@
    OnFrame _cb;
    Ticker _ticker;
    Timer::Ptr _timer;
    std::recursive_mutex _mtx;
    std::list<std::pair<uint64_t, Frame::Ptr>> _cache;
};
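The FramePacedSender touched above paces frame delivery with a periodic toolkit Timer and a dts-keyed cache protected by a recursive mutex. For readers who want the gist without the project's Timer/EventPoller machinery, here is a minimal, self-contained sketch of the same idea; all names and the std::mutex/std::list choices are mine, not the project's:

    #include <cstdint>
    #include <functional>
    #include <list>
    #include <mutex>
    #include <utility>

    // Toy frame type standing in for Frame::Ptr.
    struct Frame { uint64_t dts; };
    using OnFrame = std::function<void(const Frame &)>;

    class PacedSenderSketch {
    public:
        explicit PacedSenderSketch(OnFrame cb) : _cb(std::move(cb)) {}

        // Queue a frame; frames are released later by onTick().
        void inputFrame(const Frame &frame) {
            std::lock_guard<std::mutex> lck(_mtx);
            if (_cache.empty()) {
                _paced_stamp = frame.dts;   // start the pacing clock at the first dts
            }
            _cache.emplace_back(frame.dts, frame);
        }

        // Called periodically (e.g. every few ms): advance the paced clock and
        // flush every cached frame whose dts it has reached.
        void onTick(uint64_t elapsed_ms) {
            std::lock_guard<std::mutex> lck(_mtx);
            _paced_stamp += elapsed_ms;
            while (!_cache.empty() && _cache.front().first <= _paced_stamp) {
                _cb(_cache.front().second);
                _cache.pop_front();
            }
        }

    private:
        OnFrame _cb;
        uint64_t _paced_stamp = 0;
        std::mutex _mtx;
        std::list<std::pair<uint64_t, Frame>> _cache;
    };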
@ -597,15 +601,17 @@ void MultiMediaSourceMuxer::resetTracks() {
        }
    }

bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame) {
bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame_in) {
    auto frame = frame_in;
    if (_option.modify_stamp != ProtocolOption::kModifyStampOff) {
        // Do not use the original absolute timestamp
        const_cast<Frame::Ptr&>(frame) = std::make_shared<FrameStamp>(frame, _stamps[frame->getIndex()], _option.modify_stamp);
        frame = std::make_shared<FrameStamp>(frame, _stamps[frame->getIndex()], _option.modify_stamp);
    }
    return _paced_sender ? _paced_sender->inputFrame(frame) : onTrackFrame_l(frame);
}

bool MultiMediaSourceMuxer::onTrackFrame_l(const Frame::Ptr &frame) {
bool MultiMediaSourceMuxer::onTrackFrame_l(const Frame::Ptr &frame_in) {
    auto frame = frame_in;
    bool ret = false;
    if (_rtmp) {
        ret = _rtmp->inputFrame(frame) ? true : ret;
@ -633,7 +639,7 @@ bool MultiMediaSourceMuxer::onTrackFrame_l(const Frame::Ptr &frame) {
    }
    if (_ring) {
        // This path forwards directly, and a thread switch may leave data cached in the pipe, so a CacheAbleFrame is required
        const_cast<Frame::Ptr &>(frame) = Frame::getCacheAbleFrame(frame);
        frame = Frame::getCacheAbleFrame(frame);
        if (frame->getTrackType() == TrackVideo) {
            // For video, the first config frame or key frame marks the start of a GOP
            auto video_key_pos = frame->keyFrame() || frame->configFrame();
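The two hunks above replace a const_cast on the const Frame::Ptr & parameter with a local shared_ptr copy that is rebound instead. A reduced illustration of the pattern, using generic names rather than the project's types:

    #include <memory>

    struct Payload { int stamp = 0; };
    using PayloadPtr = std::shared_ptr<Payload>;

    // Before: the parameter itself was rebound via const_cast, mutating the caller's view of it.
    // After: copy the smart pointer and rebind only the local copy.
    bool process(const PayloadPtr &payload_in) {
        auto payload = payload_in;                           // cheap: copies the pointer, not the data
        if (payload->stamp == 0) {
            payload = std::make_shared<Payload>(*payload);   // wrap or replace locally
            payload->stamp = 42;
        }
        return payload->stamp > 0;
    }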
@ -294,8 +294,8 @@ void RtspUrl::setup(bool is_ssl, const string &url, const string &user, const st
    splitUrl(ip, ip, port);

    _url = std::move(url);
    _user = strCoding::UrlDecodeComponent(std::move(user));
    _passwd = strCoding::UrlDecodeComponent(std::move(passwd));
    _user = strCoding::UrlDecode(std::move(user));
    _passwd = strCoding::UrlDecode(std::move(passwd));
    _host = std::move(ip);
    _port = port;
    _is_ssl = is_ssl;
@ -297,6 +297,7 @@ const string kSampleMS = RECORD_FIELD "sampleMS";
const string kFileBufSize = RECORD_FIELD "fileBufSize";
const string kFastStart = RECORD_FIELD "fastStart";
const string kFileRepeat = RECORD_FIELD "fileRepeat";
const string kEnableFmp4 = RECORD_FIELD "enableFmp4";

static onceToken token([]() {
    mINI::Instance()[kAppName] = "record";
@ -304,6 +305,7 @@ static onceToken token([]() {
    mINI::Instance()[kFileBufSize] = 64 * 1024;
    mINI::Instance()[kFastStart] = false;
    mINI::Instance()[kFileRepeat] = false;
    mINI::Instance()[kEnableFmp4] = false;
});
} // namespace Record

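For reference, the new enableFmp4 key registered above is consumed via GET_CONFIG in the MP4Muxer hunk further down; condensed, the read looks like this (project macro, abbreviated context):

    // Reads [record] enableFmp4; defaults to false per the onceToken above.
    GET_CONFIG(bool, recordEnableFmp4, Record::kEnableFmp4);
    // When true the recorder writes fragmented MP4, which generally keeps a
    // partially written recording playable; when false it keeps the classic MP4 layout.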
@ -345,6 +347,7 @@ const string kPSPT = RTP_PROXY_FIELD "ps_pt";
const string kOpusPT = RTP_PROXY_FIELD "opus_pt";
const string kGopCache = RTP_PROXY_FIELD "gop_cache";
const string kRtpG711DurMs = RTP_PROXY_FIELD "rtp_g711_dur_ms";
const string kUdpRecvSocketBuffer = RTP_PROXY_FIELD "udp_recv_socket_buffer";

static onceToken token([]() {
    mINI::Instance()[kDumpDir] = "";
@ -356,6 +359,7 @@ static onceToken token([]() {
    mINI::Instance()[kOpusPT] = 100;
    mINI::Instance()[kGopCache] = 1;
    mINI::Instance()[kRtpG711DurMs] = 100;
    mINI::Instance()[kUdpRecvSocketBuffer] = 4 * 1024 * 1024;
});
} // namespace RtpProxy

@ -354,6 +354,8 @@ extern const std::string kFileBufSize;
extern const std::string kFastStart;
// whether to loop-read the mp4 file from the beginning
extern const std::string kFileRepeat;
// whether mp4 recordings are written in fmp4 format
extern const std::string kEnableFmp4;
} // namespace Record

////////////HLS related config///////////
@ -400,6 +402,8 @@ extern const std::string kGopCache;
// For GB28181 G.711 RTP packing, the audio duration carried by each packet; default 100 ms, allowed range 20-180 ms (GB28181-2016, C.2.4),
// preferably a multiple of 20; the program rounds it to a multiple of 20 automatically
extern const std::string kRtpG711DurMs;
// udp recv socket buffer size
extern const std::string kUdpRecvSocketBuffer;
} // namespace RtpProxy

/**

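The kUdpRecvSocketBuffer key declared above is read in the RtpServer/RtpSession hunks below; condensed, the pattern is the usual GET_CONFIG plus a socket option call (both taken from later hunks in this commit):

    // [rtp_proxy] udp_recv_socket_buffer, default 4 MiB (4 * 1024 * 1024 bytes)
    GET_CONFIG(int, udpRecvSocketBuffer, RtpProxy::kUdpRecvSocketBuffer);
    // Enlarge the kernel receive buffer of the RTP udp socket to absorb bursts.
    SockUtil::setRecvBuf(rtp_socket->rawFD(), udpRecvSocketBuffer);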
@ -72,7 +72,7 @@ void HlsMakerImp::clearCache(bool immediately, bool eof) {
    std::list<std::string> lst;
    lst.emplace_back(_path_hls);
    lst.emplace_back(_path_hls_delay);
    if (!_path_init.empty()) {
    if (!_path_init.empty() && eof) {
        lst.emplace_back(_path_init);
    }
    for (auto &pr : _segment_file_paths) {

@ -31,7 +31,8 @@ void MP4Muxer::openMP4(const string &file) {

MP4FileIO::Writer MP4Muxer::createWriter() {
    GET_CONFIG(bool, mp4FastStart, Record::kFastStart);
    return _mp4_file->createWriter(mp4FastStart ? MOV_FLAG_FASTSTART : 0, false);
    GET_CONFIG(bool, recordEnableFmp4, Record::kEnableFmp4);
    return _mp4_file->createWriter(mp4FastStart ? MOV_FLAG_FASTSTART : 0, recordEnableFmp4);
}

void MP4Muxer::closeMP4() {

@ -117,11 +117,13 @@ bool MP4Recorder::inputFrame(const Frame::Ptr &frame) {
    if (!(_have_video && frame->getTrackType() == TrackAudio)) {
        // If there is video and the input is audio, the slicing logic should be skipped
        if (_last_dts == 0 || _last_dts > frame->dts()) {
            // In rare cases the dts may step backwards
            _last_dts = frame->dts();
            // With B-frames the dts may step backwards
            _last_dts = MAX(frame->dts(), _last_dts);
        }
        auto duration = 5; // at least 5 ms per frame by default
        if (frame->dts() > 0 && frame->dts() > _last_dts) {
            duration = MAX(duration, frame->dts() - _last_dts);
        }

        auto duration = frame->dts() - _last_dts;
        if (!_muxer || ((duration > _max_second * 1000) && (!_have_video || (_have_video && frame->keyFrame())))) {
            // Conditions for starting a new slice:
            // 1. _muxer is null
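A small arithmetic illustration of the MAX clamp introduced above: with B-frames the decode timestamps can step backwards, and the clamp keeps the recorder's last-dts watermark monotonic (the values below are made up):

    // incoming dts : 1000  1040  1020  1080
    // watermark    : 1000  1040  1040  1080   // watermark = MAX(dts, watermark), never rolls back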
@ -165,7 +165,14 @@ void RtmpProtocol::sendResponse(int type, const string &str) {

void RtmpProtocol::sendInvoke(const string &cmd, const AMFValue &val) {
    AMFEncoder enc;
    enc << cmd << ++_send_req_id << val;
    if (val.type() == AMFType::AMF_OBJECT || val.type() == AMFType::AMF_NULL)
    {
        enc << cmd << ++_send_req_id << val;
    }
    else
    {
        enc << cmd << ++_send_req_id << AMFValue() << val;
    }
    sendRequest(MSG_CMD, enc.data());
}

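Context for the sendInvoke change above: an RTMP command message is encoded as command name, transaction id, command object, then optional arguments. When the caller already passes an object (or an explicit null) it can be written directly; for a plain value such as a stream name, a null command object has to be inserted first. A hedged sketch of the two encodings using the AMFEncoder/AMFValue types seen above (values are illustrative, the real code uses ++_send_req_id):

    AMFEncoder enc1, enc2;
    std::string cmd1 = "createStream";
    int transaction_id = 2;                                   // stands in for ++_send_req_id
    enc1 << cmd1 << transaction_id << AMFValue(AMF_NULL);     // object/null argument: encoded as-is

    std::string cmd2 = "releaseStream";
    enc2 << cmd2 << ++transaction_id << AMFValue()            // null command object placeholder first
         << AMFValue(std::string("live/stream_01"));          // then the plain-value argument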
@ -619,12 +626,22 @@ const char* RtmpProtocol::handle_rtmp(const char *data, size_t len) {
        case 12:
            chunk_data.is_abs_stamp = true;
            chunk_data.stream_index = load_le32(header->stream_index);
            _last_stream_index = chunk_data.stream_index;
        case 8:
            chunk_data.body_size = load_be24(header->body_size);
            chunk_data.type_id = header->type_id;
            _last_body_size = chunk_data.body_size;
            _last_type_id = chunk_data.type_id;
        case 4:
            chunk_data.ts_field = load_be24(header->time_stamp);
    }
    switch (header->fmt) {
        case 2:
            chunk_data.type_id = _last_type_id;
            chunk_data.body_size = _last_body_size;
        case 1:
            chunk_data.stream_index = _last_stream_index;
    }

    auto time_stamp = chunk_data.ts_field;
    if (chunk_data.ts_field == 0xFFFFFF) {

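Background for the fall-through switches above: the RTMP chunk message header shrinks as the fmt field grows, and the omitted fields are reused from the previous chunk on the same chunk stream, which is exactly what the new _last_* members cache. Roughly:

    // fmt 0 -> 11-byte message header: timestamp, body size, type id, stream id (absolute timestamp)
    // fmt 1 ->  7-byte message header: timestamp delta, body size, type id      (stream id reused)
    // fmt 2 ->  3-byte message header: timestamp delta only                     (size/type/stream reused)
    // fmt 3 ->  0-byte message header: everything reused from the previous chunk
    // The case labels 12/8/4 above appear to include the 1-byte basic header as well.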
@ -11,6 +11,7 @@
#ifndef SRC_RTMP_RTMPPROTOCOL_H_
#define SRC_RTMP_RTMPPROTOCOL_H_

#include <cstdint>
#include <memory>
#include <string>
#include <functional>
@ -87,6 +88,9 @@ protected:
private:
    bool _data_started = false;
    int _now_chunk_id = 0;
    uint32_t _last_stream_index = 0;
    size_t _last_body_size = 0;
    uint8_t _last_type_id = 0;
    ////////////ChunkSize////////////
    size_t _chunk_size_in = DEFAULT_CHUNK_LEN;
    size_t _chunk_size_out = DEFAULT_CHUNK_LEN;

@ -163,14 +163,28 @@ void RtmpPusher::send_connect() {
}

void RtmpPusher::send_createStream() {
    AMFValue obj(AMF_NULL);
    sendInvoke("createStream", obj);
    addOnResultCB([this](AMFDecoder &dec) {
        //TraceL << "createStream result";
        dec.load<AMFValue>();
        _stream_index = dec.load<int>();
        send_publish();
    });
    // Workaround: stay compatible with older FMS 3.0 servers
    {
        {
            AMFValue obj(_stream_id);
            sendInvoke("releaseStream", obj);
        }
        {
            AMFValue obj(_stream_id);
            sendInvoke("FCPublish", obj);
        }
    }
    {
        AMFValue obj(AMF_NULL);
        sendInvoke("createStream", obj);
        addOnResultCB([this](AMFDecoder &dec) {
            //TraceL << "createStream result";
            dec.load<AMFValue>();
            _stream_index = dec.load<int>();
            send_publish();
        });
    }

}

#define RTMP_STREAM_LIVE "live"

@ -199,8 +199,8 @@ void RtpProcess::setStopCheckRtp(bool is_check){
    }
}

void RtpProcess::setOnlyAudio(bool only_audio){
    _only_audio = only_audio;
void RtpProcess::setOnlyTrack(OnlyTrack only_track) {
    _only_track = only_track;
}

void RtpProcess::onDetach() {
@ -259,8 +259,10 @@ void RtpProcess::emitOnPublish() {
        if (!option.stream_replace.empty()) {
            RtpSelector::Instance().addStreamReplace(strong_self->_media_info.stream, option.stream_replace);
        }
        if (strong_self->_only_audio) {
            strong_self->_muxer->setOnlyAudio();
        switch (strong_self->_only_track) {
            case kOnlyAudio: strong_self->_muxer->setOnlyAudio(); break;
            case kOnlyVideo: strong_self->_muxer->enableAudio(false); break;
            default: break;
        }
        strong_self->_muxer->setMediaListener(strong_self);
        strong_self->doCachedFunc();

@ -24,6 +24,7 @@ public:
    friend class RtpProcessHelper;
    RtpProcess(const std::string &stream_id);
    ~RtpProcess();
    enum OnlyTrack { kAll = 0, kOnlyAudio = 1, kOnlyVideo = 2 };

    /**
     * Input rtp
@ -58,10 +59,10 @@ public:
    void setStopCheckRtp(bool is_check=false);

    /**
     * Restrict to a single track; single-audio can speed up media registration
     * Restrict to a single track; single-audio/single-video can speed up media registration
     * Call this before inputRtp, otherwise it may be a no-op
     */
    void setOnlyAudio(bool only_audio);
    void setOnlyTrack(OnlyTrack only_track);

    /**
     * Flush the output buffer
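A brief usage sketch of the new OnlyTrack API above, mirroring how the RTP server glue later in this commit wires it up (the stream id is illustrative, and the bool argument to getProcess is assumed to mean create-if-missing):

    auto process = RtpSelector::Instance().getProcess("camera_01", true /*assumed: create if missing*/);
    // Must be called before the first inputRtp(), otherwise it may be a no-op (see the comment above).
    process->setOnlyTrack(RtpProcess::kOnlyVideo);   // or kOnlyAudio / kAll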
@ -93,7 +94,7 @@ private:
    void doCachedFunc();

private:
    bool _only_audio = false;
    OnlyTrack _only_track = kAll;
    std::string _auth_err;
    uint64_t _dts = 0;
    uint64_t _total_bytes = 0;

@ -42,12 +42,12 @@ public:
        }
    }

    void setRtpServerInfo(uint16_t local_port,RtpServer::TcpMode mode,bool re_use_port,uint32_t ssrc, bool only_audio) {
    void setRtpServerInfo(uint16_t local_port, RtpServer::TcpMode mode, bool re_use_port, uint32_t ssrc, int only_track) {
        _local_port = local_port;
        _tcp_mode = mode;
        _re_use_port = re_use_port;
        _ssrc = ssrc;
        _only_audio = only_audio;
        _only_track = only_track;
    }

    void setOnDetach(function<void()> cb) {
@ -61,7 +61,7 @@ public:
    void onRecvRtp(const Socket::Ptr &sock, const Buffer::Ptr &buf, struct sockaddr *addr) {
        if (!_process) {
            _process = RtpSelector::Instance().getProcess(_stream_id, true);
            _process->setOnlyAudio(_only_audio);
            _process->setOnlyTrack((RtpProcess::OnlyTrack)_only_track);
            _process->setOnDetach(std::move(_on_detach));
            cancelDelayTask();
        }
@ -142,7 +142,7 @@ private:

private:
    bool _re_use_port = false;
    bool _only_audio = false;
    int _only_track = 0;
    uint16_t _local_port = 0;
    uint32_t _ssrc = 0;
    RtpServer::TcpMode _tcp_mode = RtpServer::NONE;
@ -156,7 +156,7 @@ private:
    EventPoller::DelayTask::Ptr _delay_task;
};

void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_mode, const char *local_ip, bool re_use_port, uint32_t ssrc, bool only_audio, bool multiplex) {
void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_mode, const char *local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
    // create the udp server
    Socket::Ptr rtp_socket = Socket::createSocket(nullptr, true);
    Socket::Ptr rtcp_socket = Socket::createSocket(nullptr, true);
@ -174,7 +174,8 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
    }

    // set the udp socket receive buffer
    SockUtil::setRecvBuf(rtp_socket->rawFD(), 4 * 1024 * 1024);
    GET_CONFIG(int, udpRecvSocketBuffer, RtpProxy::kUdpRecvSocketBuffer);
    SockUtil::setRecvBuf(rtp_socket->rawFD(), udpRecvSocketBuffer);

    TcpServer::Ptr tcp_server;
    _tcp_mode = tcp_mode;
@ -183,7 +184,7 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
        tcp_server = std::make_shared<TcpServer>(rtp_socket->getPoller());
        (*tcp_server)[RtpSession::kStreamID] = stream_id;
        (*tcp_server)[RtpSession::kSSRC] = ssrc;
        (*tcp_server)[RtpSession::kOnlyAudio] = only_audio;
        (*tcp_server)[RtpSession::kOnlyTrack] = only_track;
        if (tcp_mode == PASSIVE) {
            tcp_server->start<RtpSession>(local_port, local_ip);
        } else if (stream_id.empty()) {
@ -200,7 +201,7 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
        // A stream id was specified, so one port serves one stream (even if several streams with different ssrc arrive, after the rtp source is bound, streams whose ip/port do not match are filtered out)
        helper = std::make_shared<RtcpHelper>(std::move(rtcp_socket), stream_id);
        helper->startRtcp();
        helper->setRtpServerInfo(local_port, tcp_mode, re_use_port, ssrc, only_audio);
        helper->setRtpServerInfo(local_port, tcp_mode, re_use_port, ssrc, only_track);
        bool bind_peer_addr = false;
        auto ssrc_ptr = std::make_shared<uint32_t>(ssrc);
        _ssrc = ssrc_ptr;
@ -222,7 +223,8 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
    } else {
        // One port with multiple threads receiving multiple streams, distinguished by ssrc
        udp_server = std::make_shared<UdpServer>(rtp_socket->getPoller());
        (*udp_server)[RtpSession::kOnlyAudio] = only_audio;
        (*udp_server)[RtpSession::kOnlyTrack] = only_track;
        (*udp_server)[RtpSession::kUdpRecvBuffer] = udpRecvSocketBuffer;
        udp_server->start<RtpSession>(local_port, local_ip);
        rtp_socket = nullptr;
    }

@ -44,7 +44,7 @@ public:
     * @param multiplex enable multiplexing
     */
    void start(uint16_t local_port, const std::string &stream_id = "", TcpMode tcp_mode = PASSIVE,
               const char *local_ip = "::", bool re_use_port = true, uint32_t ssrc = 0, bool only_audio = false, bool multiplex = false);
               const char *local_ip = "::", bool re_use_port = true, uint32_t ssrc = 0, int only_track = 0, bool multiplex = false);

    /**
     * Connect to the tcp server (tcp active mode)
@ -81,7 +81,7 @@ protected:
    std::shared_ptr<RtcpHelper> _rtcp_helper;
    std::function<void()> _on_cleanup;

    bool _only_audio = false;
    int _only_track = 0;
    // used in tcp active mode
    TcpMode _tcp_mode = NONE;
};

@ -23,7 +23,8 @@ namespace mediakit{

const string RtpSession::kStreamID = "stream_id";
const string RtpSession::kSSRC = "ssrc";
const string RtpSession::kOnlyAudio = "only_audio";
const string RtpSession::kOnlyTrack = "only_track";
const string RtpSession::kUdpRecvBuffer = "udp_recv_socket_buffer";

void RtpSession::attachServer(const Server &server) {
    setParams(const_cast<Server &>(server));
@ -32,7 +33,13 @@ void RtpSession::attachServer(const Server &server) {
void RtpSession::setParams(mINI &ini) {
    _stream_id = ini[kStreamID];
    _ssrc = ini[kSSRC];
    _only_audio = ini[kOnlyAudio];
    _only_track = ini[kOnlyTrack];
    int udp_socket_buffer = ini[kUdpRecvBuffer];
    if (_is_udp) {
        // set the udp socket receive buffer
        SockUtil::setRecvBuf(getSock()->rawFD(),
                             (udp_socket_buffer > 0) ? udp_socket_buffer : (4 * 1024 * 1024));
    }
}

RtpSession::RtpSession(const Socket::Ptr &sock)
@ -40,10 +47,6 @@ RtpSession::RtpSession(const Socket::Ptr &sock)
    socklen_t addr_len = sizeof(_addr);
    getpeername(sock->rawFD(), (struct sockaddr *)&_addr, &addr_len);
    _is_udp = sock->sockType() == SockNum::Sock_UDP;
    if (_is_udp) {
        // set the udp socket receive buffer
        SockUtil::setRecvBuf(getSock()->rawFD(), 4 * 1024 * 1024);
    }
}

RtpSession::~RtpSession() = default;
@ -122,7 +125,7 @@ void RtpSession::onRtpPacket(const char *data, size_t len) {
            _delay_close = true;
            return;
        }
        _process->setOnlyAudio(_only_audio);
        _process->setOnlyTrack((RtpProcess::OnlyTrack)_only_track);
        _process->setDelegate(static_pointer_cast<RtpSession>(shared_from_this()));
    }
    try {

@ -24,7 +24,8 @@ class RtpSession : public toolkit::Session, public RtpSplitter, public MediaSour
public:
    static const std::string kStreamID;
    static const std::string kSSRC;
    static const std::string kOnlyAudio;
    static const std::string kOnlyTrack;
    static const std::string kUdpRecvBuffer;

    RtpSession(const toolkit::Socket::Ptr &sock);
    ~RtpSession() override;
@ -51,7 +52,7 @@ private:
    bool _is_udp = false;
    bool _search_rtp = false;
    bool _search_rtp_finished = false;
    bool _only_audio = false;
    int _only_track = 0;
    uint32_t _ssrc = 0;
    toolkit::Ticker _ticker;
    std::string _stream_id;

@ -352,12 +352,20 @@ public:
    }

    void makeSockPair(std::pair<Socket::Ptr, Socket::Ptr> &pair, const string &local_ip, bool re_use_port, bool is_udp) {
        auto &sock0 = pair.first;
        auto &sock1 = pair.second;
        auto sock_pair = getPortPair();
        if (!sock_pair) {
            throw runtime_error("none reserved port in pool");
        }
        makeSockPair_l(sock_pair, pair, local_ip, re_use_port, is_udp);

        // make sure the reserved ports can be opened in both udp and tcp mode
        auto new_pair = std::make_pair(Socket::createSocket(), Socket::createSocket());
        makeSockPair_l(sock_pair, new_pair, local_ip, re_use_port, !is_udp);
    }

    void makeSockPair_l(const std::shared_ptr<uint16_t> &sock_pair, std::pair<Socket::Ptr, Socket::Ptr> &pair, const string &local_ip, bool re_use_port, bool is_udp) {
        auto &sock0 = pair.first;
        auto &sock1 = pair.second;
        if (is_udp) {
            if (!sock0->bindUdpSock(2 * *sock_pair, local_ip.data(), re_use_port)) {
                // port allocation failed
@ -741,8 +741,7 @@ namespace RTC

    if (!IsRunning())
    {
        MS_ERROR("cannot process data while not running");

        MS_WARN_TAG(nullptr,"cannot process data while not running");
        return;
    }

@ -31,7 +31,6 @@
#define RTP_CNAME "zlmediakit-rtp"
#define RTP_LABEL "zlmediakit-label"
#define RTP_MSLABEL "zlmediakit-mslabel"
#define RTP_MSID RTP_MSLABEL " " RTP_LABEL

using namespace std;

@ -707,9 +706,9 @@ void WebRtcTransportImp::onCheckAnswer(RtcSession &sdp) {
        // The outgoing ssrc can be an arbitrary value here, because it is rewritten to this value when rtp is sent
        ssrc.ssrc = m.type + RTP_SSRC_OFFSET;
        ssrc.cname = RTP_CNAME;
        ssrc.label = RTP_LABEL;
        ssrc.label = std::string(RTP_LABEL) + '-' + m.mid;
        ssrc.mslabel = RTP_MSLABEL;
        ssrc.msid = RTP_MSID;
        ssrc.msid = ssrc.mslabel + ' ' + ssrc.label;

        if (m.getRelatedRtxPlan(m.plan[0].pt)) {
            // rtx ssrc
@ -115,17 +115,10 @@
    document.getElementsByName("method").forEach((el,idx) => {
        el.checked = el.value === type;
        el.onclick = function(e) {
            let url = new URL(document.getElementById('streamUrl').value);
            const url = new URL(document.getElementById('streamUrl').value);
            url.searchParams.set("type",el.value);
            document.getElementById('streamUrl').value = url.toString();

            if(el.value == "play"){
                recvOnly = true;
            }else if(el.value == "echo"){
                recvOnly = false;
            }else{
                recvOnly = false;
            }
            recvOnly = 'play' === el.value;
        };
    });

@ -145,6 +138,25 @@
    let h = parseInt(res.pop());
    let w = parseInt(res.pop());

    const url = new URL(document.getElementById('streamUrl').value);
    const newUrl = new URL(window.location.href);
    let count = 0;
    if (url.searchParams.has('app')) {
        newUrl.searchParams.set('app', url.searchParams.get('app'));
        count++;
    }
    if (url.searchParams.has('stream')) {
        newUrl.searchParams.set('stream', url.searchParams.get('stream'));
        count++;
    }
    if (url.searchParams.has('type')) {
        newUrl.searchParams.set('type', url.searchParams.get('type'));
        count++;
    }
    if (count > 0) {
        window.history.pushState(null, null, newUrl);
    }

    player = new ZLMRTCClient.Endpoint(
        {
            element: document.getElementById('video'), // video element