Mirror of https://github.com/ZLMediaKit/ZLMediaKit.git (synced 2024-11-25 04:08:57 +08:00)
Merge branch 'master' into dev
Commit a153d99f6d

.github/workflows/android.yml (4 changes, vendored)
@@ -46,12 +46,14 @@ jobs:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            github.rest.issues.createComment({
              issue_number: 483,
              issue_number: ${{vars.VERSION_ISSUE_NO}},
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: '- Download URL: [${{ github.workflow }}_${{ env.BRANCH }}_${{ env.DATE }}](${{ steps.upload.outputs.artifact-url }})\n'
                + '- Branch: ${{ env.BRANCH2 }}\n'
                + '- git hash: ${{ github.sha }} \n'
                + '- Build date: ${{ env.DATE }}\n'
                + '- Build run: [${{ github.run_id }}](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})\n'
                + '- Enabled features: openssl/webrtc/datachannel etc. are not enabled\n'
                + '- Packaging CI name: ${{ github.workflow }}\n'
            })
.github/workflows/linux.yml (89 changes, vendored)

@@ -21,33 +21,82 @@ jobs:
          ref: v2.3.0
          path: 3rdpart/libsrtp

      - name: Download openssl
        uses: actions/checkout@v2
        with:
          repository: openssl/openssl
          fetch-depth: 1
          ref: OpenSSL_1_1_1
          path: 3rdpart/openssl

      - name: Download usrsctp
        uses: actions/checkout@v2
        with:
          repository: sctplab/usrsctp
          fetch-depth: 1
          ref: 0.9.5.0
          path: 3rdpart/usrsctp

      - name: Start a Docker container and run the build script inside it
        run: |
          docker pull centos:7
          docker run -v $(pwd):/root -w /root --rm centos:7 sh -c "
          #!/bin/bash
          set -x
          yum install -y git wget gcc gcc-c++ make unzip ffmpeg-devel libavutil-devel libswscale-devel libresample-devel usrsctp-devel

          wget https://github.com/openssl/openssl/archive/refs/heads/OpenSSL_1_1_1-stable.zip
          unzip OpenSSL_1_1_1-stable.zip
          cd openssl-OpenSSL_1_1_1-stable
          ./config no-shared --prefix=/root/release
          # Backup original CentOS-Base.repo file
          cp /etc/yum.repos.d/CentOS-Base.repo /etc/yum.repos.d/CentOS-Base.repo.backup

          # Define new repository configuration
          cat <<EOF > /etc/yum.repos.d/CentOS-Base.repo
          [base]
          name=CentOS-7 - Base - mirrors.aliyun.com
          baseurl=http://mirrors.aliyun.com/centos/7/os/x86_64/
          gpgcheck=1
          gpgkey=http://mirrors.aliyun.com/centos/RPM-GPG-KEY-CentOS-7

          [updates]
          name=CentOS-7 - Updates - mirrors.aliyun.com
          baseurl=http://mirrors.aliyun.com/centos/7/updates/x86_64/
          gpgcheck=1
          gpgkey=http://mirrors.aliyun.com/centos/RPM-GPG-KEY-CentOS-7
          EOF

          # Clean yum cache and recreate it
          yum clean all
          yum makecache

          echo \"CentOS 7 package mirrors switched successfully\"
          yum install -y git wget gcc gcc-c++ make

          mkdir -p /root/install

          cd 3rdpart/openssl
          ./config no-shared --prefix=/root/install
          make -j $(nproc)
          make install
          cd ..

          wget https://github.com/Kitware/CMake/releases/download/v3.29.5/cmake-3.29.5.tar.gz
          tar -xvf cmake-3.29.5.tar.gz
          cd cmake-3.29.5
          OPENSSL_ROOT_DIR=/root/release ./configure
          make -j $(nproc)
          make install
          cd ..

          cd 3rdpart/libsrtp && ./configure --enable-openssl --with-openssl-dir=/root/release && make -j $(nproc) && make install
          cd ../../

          mkdir -p linux_build && cd linux_build && cmake .. -DOPENSSL_ROOT_DIR=/root/release -DCMAKE_BUILD_TYPE=Release -DENABLE_FFMPEG=true && make -j $(nproc)
          wget https://github.com/Kitware/CMake/releases/download/v3.29.5/cmake-3.29.5.tar.gz
          tar -xf cmake-3.29.5.tar.gz
          cd cmake-3.29.5
          OPENSSL_ROOT_DIR=/root/install ./configure
          make -j $(nproc)
          make install
          cd ..

          cd 3rdpart/usrsctp
          mkdir build
          cd build
          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE=ON ..
          make -j $(nproc)
          make install
          cd ../../../

          cd 3rdpart/libsrtp && ./configure --enable-openssl --with-openssl-dir=/root/install && make -j $(nproc) && make install
          cd ../../

          mkdir -p linux_build && cd linux_build && cmake .. -DOPENSSL_ROOT_DIR=/root/install -DCMAKE_BUILD_TYPE=Release && make -j $(nproc)
          "

      - name: Set environment variables

@@ -72,13 +121,15 @@ jobs:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            github.rest.issues.createComment({
              issue_number: 483,
              issue_number: ${{vars.VERSION_ISSUE_NO}},
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: '- Download URL: [${{ github.workflow }}_${{ env.BRANCH }}_${{ env.DATE }}](${{ steps.upload.outputs.artifact-url }})\n'
                + '- Branch: ${{ env.BRANCH2 }}\n'
                + '- git hash: ${{ github.sha }} \n'
                + '- Build date: ${{ env.DATE }}\n'
                + '- Build run: [${{ github.run_id }}](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})\n'
                + '- Packaging CI name: ${{ github.workflow }}\n'
                + '- Note: this binary was built on centos7 (x64); make sure your system is no older than that, and install the dependencies first with `sudo yum check-update && sudo yum install -y openssl-devel ffmpeg-devel libavutil-devel libswscale-devel libresample-devel usrsctp-devel`\n'
                + '- Enabled features: openssl/webrtc/datachannel\n'
                + '- Note: this binary was built on centos7 (x64); make sure your system is no older than that\n'
            })
.github/workflows/macos.yml (6 changes, vendored)

@@ -20,7 +20,7 @@ jobs:
          vcpkgTriplet: arm64-osx
          # 2024.06.01
          vcpkgGitCommitId: '47364fbc300756f64f7876b549d9422d5f3ec0d3'
          vcpkgArguments: 'openssl libsrtp[openssl]'
          vcpkgArguments: 'openssl libsrtp[openssl] usrsctp'

      - name: Build
        uses: lukka/run-cmake@v3
@@ -52,13 +52,15 @@ jobs:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            github.rest.issues.createComment({
              issue_number: 483,
              issue_number: ${{vars.VERSION_ISSUE_NO}},
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: '- Download URL: [${{ github.workflow }}_${{ env.BRANCH }}_${{ env.DATE }}](${{ steps.upload.outputs.artifact-url }})\n'
                + '- Branch: ${{ env.BRANCH2 }}\n'
                + '- git hash: ${{ github.sha }} \n'
                + '- Build date: ${{ env.DATE }}\n'
                + '- Build run: [${{ github.run_id }}](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})\n'
                + '- Packaging CI name: ${{ github.workflow }}\n'
                + '- Enabled features: openssl/webrtc/datachannel\n'
                + '- Note: this binary is the arm64 build\n'
            })
.github/workflows/windows.yml (6 changes, vendored)

@@ -19,7 +19,7 @@ jobs:
          vcpkgTriplet: x64-windows-static
          # 2024.06.01
          vcpkgGitCommitId: '47364fbc300756f64f7876b549d9422d5f3ec0d3'
          vcpkgArguments: 'openssl libsrtp[openssl]'
          vcpkgArguments: 'openssl libsrtp[openssl] usrsctp'

      - name: Build
        uses: lukka/run-cmake@v3
@@ -54,13 +54,15 @@ jobs:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            github.rest.issues.createComment({
              issue_number: 483,
              issue_number: ${{vars.VERSION_ISSUE_NO}},
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: '- Download URL: [${{ github.workflow }}_${{ env.BRANCH }}_${{ env.DATE }}](${{ steps.upload.outputs.artifact-url }})\n'
                + '- Branch: ${{ env.BRANCH2 }}\n'
                + '- git hash: ${{ github.sha }} \n'
                + '- Build date: ${{ env.DATE }}\n'
                + '- Build run: [${{ github.run_id }}](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})\n'
                + '- Packaging CI name: ${{ github.workflow }}\n'
                + '- Enabled features: openssl/webrtc/datachannel\n'
                + '- Note: this binary is the x64 build\n'
            })
@@ -1 +1 @@
Subproject commit 5144e2aa521df6d473308bfb31172054772a634f
Subproject commit fb695d203421d906c473018022a736fa4a7a47e4
@@ -1 +1 @@
Subproject commit 527c0f5117b489fda78fcd123d446370ddd9ec9a
Subproject commit cf83ebc62e65ae6f3b73bc5ebd06cb0b2da49fa5
@@ -448,10 +448,21 @@ endif()

if(WIN32)
    update_cached_list(MK_LINK_LIBRARIES WS2_32 Iphlpapi shlwapi)
elseif(ANDROID)
    update_cached_list(MK_LINK_LIBRARIES log)
elseif(NOT ANDROID OR IOS)
    update_cached_list(MK_LINK_LIBRARIES pthread)
endif()

if(ENABLE_VIDEOSTACK)
    if(ENABLE_FFMPEG AND ENABLE_X264)
        message(STATUS "ENABLE_VIDEOSTACK defined")
        update_cached_list(MK_COMPILE_DEFINITIONS ENABLE_VIDEOSTACK)
    else()
        message(WARNING "ENABLE_VIDEOSTACK requires ENABLE_FFMPEG and ENABLE_X264")
    endif ()
endif ()

# ----------------------------------------------------------------------------
# Solution folders:
# ----------------------------------------------------------------------------
@@ -58,6 +58,8 @@ API_EXPORT const char *API_CALL mk_record_info_get_stream(const mk_record_info c
///////////////////////////////////////////Parser/////////////////////////////////////////////
// C mapping of the Parser object
typedef struct mk_parser_t *mk_parser;
// foreach callback for the Parser object's headers
typedef void(API_CALL *on_mk_parser_header_cb)(void *user_data, const char *key, const char *val);
// Parser::Method(): get the request method, e.g. GET/POST
API_EXPORT const char* API_CALL mk_parser_get_method(const mk_parser ctx);
// Parser::Url(): get the requested HTTP url (without the parameters after '?')
@@ -72,6 +74,8 @@ API_EXPORT const char* API_CALL mk_parser_get_tail(const mk_parser ctx);
API_EXPORT const char* API_CALL mk_parser_get_header(const mk_parser ctx,const char *key);
// Parser::Content(): get the HTTP body
API_EXPORT const char* API_CALL mk_parser_get_content(const mk_parser ctx, size_t *length);
// Iterate over all headers
API_EXPORT void API_CALL mk_parser_headers_for_each(const mk_parser ctx, on_mk_parser_header_cb cb, void *user_data);

///////////////////////////////////////////MediaInfo/////////////////////////////////////////////
// C mapping of the MediaInfo object
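As an illustration of the new header-iteration callback, the following minimal sketch dumps a request from inside an HTTP event handler; the `parser` handle is assumed to come from an existing ZLMediaKit event callback:

    #include "mk_mediakit.h"
    #include <stdio.h>

    // Invoked once per header; user_data is passed through unchanged.
    static void API_CALL print_header(void *user_data, const char *key, const char *val) {
        printf("%s: %s\n", key, val);
    }

    // Dump method, URL and all headers of the request described by `parser`.
    static void dump_request(const mk_parser parser) {
        printf("%s %s\n", mk_parser_get_method(parser), mk_parser_get_url(parser));
        mk_parser_headers_for_each(parser, print_header, NULL);
    }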
@@ -114,19 +118,24 @@ API_EXPORT int API_CALL mk_media_source_get_total_reader_count(const mk_media_so
API_EXPORT int API_CALL mk_media_source_get_track_count(const mk_media_source ctx);
// copy track reference by index from MediaSource, please use mk_track_unref to release it
API_EXPORT mk_track API_CALL mk_media_source_get_track(const mk_media_source ctx, int index);
// MediaSource::Track:loss
API_EXPORT float API_CALL mk_media_source_get_track_loss(const mk_media_source ctx, const mk_track track);
// MediaSource::broadcastMessage
API_EXPORT int API_CALL mk_media_source_broadcast_msg(const mk_media_source ctx, const char *msg, size_t len);
// MediaSource::getOriginUrl()
API_EXPORT const char* API_CALL mk_media_source_get_origin_url(const mk_media_source ctx);
// MediaSource::getOriginType()
API_EXPORT int API_CALL mk_media_source_get_origin_type(const mk_media_source ctx);
// MediaSource::getOriginTypeStr(); release the returned string with mk_free after use
API_EXPORT const char *API_CALL mk_media_source_get_origin_type_str(const mk_media_source ctx);
// MediaSource::getCreateStamp()
API_EXPORT uint64_t API_CALL mk_media_source_get_create_stamp(const mk_media_source ctx);
// MediaSource::isRecording() 0:hls,1:MP4
API_EXPORT int API_CALL mk_media_source_is_recording(const mk_media_source ctx, int type);

// MediaSource::getBytesSpeed()
API_EXPORT int API_CALL mk_media_source_get_bytes_speed(const mk_media_source ctx);
// MediaSource::getAliveSecond()
API_EXPORT uint64_t API_CALL mk_media_source_get_alive_second(const mk_media_source ctx);
/**
 * A live stream source is called a MediaSource in ZLMediaKit.
 * Three kinds are currently supported: RtmpMediaSource, RtspMediaSource and HlsMediaSource.
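A hedged sketch of how the new MediaSource getters combine; `src` is assumed to be a valid mk_media_source handle obtained from an event such as the media-changed callback:

    static void dump_source_stats(const mk_media_source src) {
        // The origin type string must be released with mk_free, as noted above.
        const char *origin = mk_media_source_get_origin_type_str(src);
        printf("origin=%s bytes/s=%d alive=%llus\n", origin,
               mk_media_source_get_bytes_speed(src),
               (unsigned long long)mk_media_source_get_alive_second(src));
        mk_free((void *)origin);

        int count = mk_media_source_get_track_count(src);
        for (int i = 0; i < count; ++i) {
            mk_track track = mk_media_source_get_track(src, i);
            printf("track[%d] codec=%s loss=%.2f\n", i,
                   mk_track_codec_name(track),
                   mk_media_source_get_track_loss(src, track));
            mk_track_unref(track); // release the copied track reference
        }
    }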
@@ -79,7 +79,7 @@ API_EXPORT void API_CALL mk_proxy_player_release(mk_proxy_player ctx);
/**
 * Set a proxy player configuration option
 * @param ctx proxy player pointer
 * @param key option key; supported keys: net_adapter/rtp_type/rtsp_user/rtsp_pwd/protocol_timeout_ms/media_timeout_ms/beat_interval_ms
 * @param key option key; supported keys: net_adapter/rtp_type/rtsp_user/rtsp_pwd/protocol_timeout_ms/media_timeout_ms/beat_interval_ms/rtsp_speed
 * @param val option value; integer values must be converted to strings
 */
API_EXPORT void API_CALL mk_proxy_player_set_option(mk_proxy_player ctx, const char *key, const char *val);
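For example, the newly documented rtsp_speed key is set like any other option (the 2.0 value and the `player` handle here are only illustrative):

    // Ask the RTSP puller to play at 2x speed; values are always passed as strings.
    mk_proxy_player_set_option(player, "rtsp_speed", "2.0");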
@@ -24,6 +24,7 @@ typedef struct mk_rtp_server_t *mk_rtp_server;
 * @return
 */
API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create(uint16_t port, int tcp_mode, const char *stream_id);
API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create2(uint16_t port, int tcp_mode, const char *vhost, const char *app, const char *stream_id);

/**
 * Callback reporting whether the connection to the server succeeded in TCP active mode
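A minimal sketch of the new overload; the vhost/app/stream values are placeholders and port 0 lets the library pick a free port:

    // Open an RTP receiving server bound to an explicit vhost/app/stream tuple.
    mk_rtp_server server = mk_rtp_server_create2(0, 1 /* enable TCP listening */,
                                                 "__defaultVhost__", "rtp", "test");
    // ... receive the stream, then release the server:
    mk_rtp_server_release(server);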
@@ -73,6 +73,21 @@ API_EXPORT const char* API_CALL mk_track_codec_name(mk_track track);
 */
API_EXPORT int API_CALL mk_track_bit_rate(mk_track track);

/**
 * Whether the track is ready; 1: ready, 0: not ready
 */
API_EXPORT int API_CALL mk_track_ready(mk_track track);

/**
 * Get the accumulated frame count
 */
API_EXPORT uint64_t API_CALL mk_track_frames(mk_track track);

/**
 * Get the duration in milliseconds
 */
API_EXPORT uint64_t API_CALL mk_track_duration(mk_track track);

/**
 * Listen for frame output events
 * @param track track object
@@ -114,6 +129,21 @@ API_EXPORT int API_CALL mk_track_video_height(mk_track track);
 */
API_EXPORT int API_CALL mk_track_video_fps(mk_track track);

/**
 * Get the accumulated video key frame count
 */
API_EXPORT uint64_t API_CALL mk_track_video_key_frames(mk_track track);

/**
 * Get the video GOP size (key-frame interval in frames)
 */
API_EXPORT int API_CALL mk_track_video_gop_size(mk_track track);

/**
 * Get the accumulated video key-frame interval (milliseconds)
 */
API_EXPORT int API_CALL mk_track_video_gop_interval_ms(mk_track track);

/**
 * Get the audio sample rate
 */
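Putting the new getters together, a monitoring loop could report per-track statistics; `track` is assumed to be a handle obtained elsewhere, e.g. from mk_media_source_get_track:

    if (mk_track_ready(track) && mk_track_is_video(track)) {
        printf("frames=%llu keyframes=%llu gop=%d gop_ms=%d duration=%llums\n",
               (unsigned long long)mk_track_frames(track),
               (unsigned long long)mk_track_video_key_frames(track),
               mk_track_video_gop_size(track),
               mk_track_video_gop_interval_ms(track),
               (unsigned long long)mk_track_duration(track));
    }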
@@ -125,6 +125,13 @@ API_EXPORT const char* API_CALL mk_parser_get_content(const mk_parser ctx, size_
    }
    return parser->content().c_str();
}
API_EXPORT void API_CALL mk_parser_headers_for_each(const mk_parser ctx, on_mk_parser_header_cb cb, void *user_data){
    assert(ctx && cb);
    Parser *parser = (Parser *)ctx;
    for (auto it = parser->getHeader().begin(); it != parser->getHeader().end(); ++it) {
        cb(user_data, it->first.c_str(), it->second.c_str());
    }
}

///////////////////////////////////////////MediaInfo/////////////////////////////////////////////
API_EXPORT const char* API_CALL mk_media_info_get_params(const mk_media_info ctx){
@@ -218,6 +225,13 @@ API_EXPORT mk_track API_CALL mk_media_source_get_track(const mk_media_source ctx
    return (mk_track) new Track::Ptr(std::move(tracks[index]));
}

API_EXPORT float API_CALL mk_media_source_get_track_loss(const mk_media_source ctx, const mk_track track) {
    assert(ctx);
    MediaSource *src = (MediaSource *)ctx;
    // An RTP push stream has only one statistics counter but may contain several tracks; if the interval loss rate is queried repeatedly within a short time, the second query returns -1
    return src->getLossRate((*((Track::Ptr *)track))->getTrackType());
}

API_EXPORT int API_CALL mk_media_source_broadcast_msg(const mk_media_source ctx, const char *msg, size_t len) {
    assert(ctx && msg && len);
    MediaSource *src = (MediaSource *)ctx;
@@ -240,6 +254,12 @@ API_EXPORT int API_CALL mk_media_source_get_origin_type(const mk_media_source c
    return static_cast<int>(src->getOriginType());
}

API_EXPORT const char* API_CALL mk_media_source_get_origin_type_str(const mk_media_source ctx) {
    assert(ctx);
    MediaSource *src = (MediaSource *)ctx;
    return _strdup(getOriginTypeString(src->getOriginType()).c_str());
}

API_EXPORT uint64_t API_CALL mk_media_source_get_create_stamp(const mk_media_source ctx) {
    assert(ctx);
    MediaSource *src = (MediaSource *)ctx;
@@ -252,6 +272,19 @@ API_EXPORT int API_CALL mk_media_source_is_recording(const mk_media_source ctx,i
    return src->isRecording((Recorder::type)type);
}

API_EXPORT int API_CALL mk_media_source_get_bytes_speed(const mk_media_source ctx) {
    assert(ctx);
    MediaSource *src = (MediaSource *)ctx;
    return src->getBytesSpeed();
}

API_EXPORT uint64_t API_CALL mk_media_source_get_alive_second(const mk_media_source ctx) {
    assert(ctx);
    MediaSource *src = (MediaSource *)ctx;
    return src->getAliveSecond();
}

API_EXPORT int API_CALL mk_media_source_close(const mk_media_source ctx,int force){
    assert(ctx);
    MediaSource *src = (MediaSource *)ctx;
@@ -15,7 +15,7 @@
using namespace mediakit;

extern "C" {
#define XX(name, type, value, str, mpeg_id, mp4_id) API_EXPORT const int MK##name = value;
#define XX(name, type, value, str, mpeg_id, mp4_id) const int MK##name = value;
CODEC_MAP(XX)
#undef XX
}
@@ -28,7 +28,8 @@ API_EXPORT mk_proxy_player API_CALL mk_proxy_player_create3(const char *vhost, c
    ProtocolOption option;
    option.enable_hls = hls_enabled;
    option.enable_mp4 = mp4_enabled;
    PlayerProxy::Ptr *obj(new PlayerProxy::Ptr(new PlayerProxy(vhost, app, stream, option, retry_count)));
    MediaTuple tuple = {vhost, app, stream, ""};
    PlayerProxy::Ptr *obj(new PlayerProxy::Ptr(new PlayerProxy(tuple, option, retry_count)));
    return (mk_proxy_player)obj;
}

@@ -36,7 +37,8 @@ API_EXPORT mk_proxy_player API_CALL mk_proxy_player_create3(const char *vhost, c
API_EXPORT mk_proxy_player API_CALL mk_proxy_player_create4(const char *vhost, const char *app, const char *stream, mk_ini ini, int retry_count) {
    assert(vhost && app && stream);
    ProtocolOption option(*((mINI *)ini));
    PlayerProxy::Ptr *obj(new PlayerProxy::Ptr(new PlayerProxy(vhost, app, stream, option, retry_count)));
    MediaTuple tuple = {vhost, app, stream, ""};
    PlayerProxy::Ptr *obj(new PlayerProxy::Ptr(new PlayerProxy(tuple, option, retry_count)));
    return (mk_proxy_player)obj;
}
@@ -18,7 +18,13 @@ using namespace mediakit;

API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create(uint16_t port, int tcp_mode, const char *stream_id) {
    RtpServer::Ptr *server = new RtpServer::Ptr(new RtpServer);
    (*server)->start(port, stream_id, (RtpServer::TcpMode)tcp_mode);
    (*server)->start(port, MediaTuple { DEFAULT_VHOST, kRtpAppName, stream_id, "" }, (RtpServer::TcpMode)tcp_mode);
    return (mk_rtp_server)server;
}

API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create2(uint16_t port, int tcp_mode, const char *vhost, const char *app, const char *stream_id) {
    RtpServer::Ptr *server = new RtpServer::Ptr(new RtpServer);
    (*server)->start(port, MediaTuple { vhost, app, stream_id, "" }, (RtpServer::TcpMode)tcp_mode);
    return (mk_rtp_server)server;
}

@@ -71,6 +77,11 @@ API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create(uint16_t port, int enable
    return nullptr;
}

API_EXPORT mk_rtp_server API_CALL mk_rtp_server_create2(uint16_t port, int tcp_mode, const char *vhost, const char *app, const char *stream_id) {
    WarnL << "please enable ENABLE_RTPPROXY and recompile";
    return nullptr;
}

API_EXPORT void API_CALL mk_rtp_server_release(mk_rtp_server ctx) {
    WarnL << "please enable ENABLE_RTPPROXY and recompile";
}
@@ -109,6 +109,21 @@ API_EXPORT int API_CALL mk_track_bit_rate(mk_track track) {
    return (*((Track::Ptr *) track))->getBitRate();
}

API_EXPORT int API_CALL mk_track_ready(mk_track track) {
    assert(track);
    return (*((Track::Ptr *)track))->ready();
}

API_EXPORT uint64_t API_CALL mk_track_frames(mk_track track) {
    assert(track);
    return (*((Track::Ptr *)track))->getFrames();
}

API_EXPORT uint64_t API_CALL mk_track_duration(mk_track track) {
    assert(track);
    return (*((Track::Ptr *)track))->getDuration();
}

API_EXPORT void *API_CALL mk_track_add_delegate(mk_track track, on_mk_frame_out cb, void *user_data) {
    return mk_track_add_delegate2(track, cb, user_data, nullptr);
}
@@ -167,6 +182,36 @@ API_EXPORT int API_CALL mk_track_video_fps(mk_track track) {
    return 0;
}

API_EXPORT uint64_t API_CALL mk_track_video_key_frames(mk_track track) {
    assert(track);
    auto video = dynamic_pointer_cast<VideoTrack>((*((Track::Ptr *)track)));
    if (video) {
        return video->getVideoFps();
    }
    WarnL << "not video track";
    return 0;
}

API_EXPORT int API_CALL mk_track_video_gop_size(mk_track track) {
    assert(track);
    auto video = dynamic_pointer_cast<VideoTrack>((*((Track::Ptr *)track)));
    if (video) {
        return video->getVideoGopSize();
    }
    WarnL << "not video track";
    return 0;
}

API_EXPORT int API_CALL mk_track_video_gop_interval_ms(mk_track track) {
    assert(track);
    auto video = dynamic_pointer_cast<VideoTrack>((*((Track::Ptr *)track)));
    if (video) {
        return video->getVideoGopInterval();
    }
    WarnL << "not video track";
    return 0;
}

API_EXPORT int API_CALL mk_track_audio_sample_rate(mk_track track) {
    assert(track);
    auto audio = dynamic_pointer_cast<AudioTrack>((*((Track::Ptr *) track)));
@@ -29,9 +29,8 @@ static void on_h264_frame(void *user_data, mk_h264_splitter splitter, const char
#else
    usleep(40 * 1000);
#endif
    static int dts = 0;
    uint64_t dts = mk_util_get_current_millisecond();
    mk_frame frame = mk_frame_create(MKCodecH264, dts, dts, data, size, NULL, NULL);
    dts += 40;
    mk_media_input_frame((mk_media) user_data, frame);
    mk_frame_unref(frame);
}
@@ -369,13 +369,13 @@ start_bitrate=0
max_bitrate=0
min_bitrate=0

#NACK receiver side
#Earliest time span of packets kept in the NACK cache
maxNackMS=5000
#NACK check interval (in packets)
rtpCacheCheckInterval=100
#NACK receiver side, RTP sender side: ZLMediaKit sending an RTC stream
#Maximum length of the RTP retransmission cache queue, in milliseconds
maxRtpCacheMS=5000
#Maximum length of the RTP retransmission cache queue, in packets
maxRtpCacheSize=2048

#NACK sender side
#NACK sender side, RTP receiver side: ZLMediaKit receiving an RTC push stream
#Maximum number of RTP loss states kept
nackMaxSize=2048
#Maximum time an RTP loss state is kept
@@ -86,27 +86,26 @@ bool AACRtpDecoder::inputRtp(const RtpPacket::Ptr &rtp, bool key_pos) {
    }

    // timestamp increment per audio unit
    auto dts_inc = (stamp - _last_dts) / au_header_count;
    if (dts_inc < 0 && dts_inc > 100) {
    auto dts_inc = static_cast<int64_t>(stamp - _last_dts) / au_header_count;
    if (dts_inc < 0 || dts_inc > 100) {
        // abnormal timestamp increment, ignore it
        dts_inc = 0;
    }

    for (int i = 0; i < au_header_count; ++i) {
    for (auto i = 0u; i < (size_t)au_header_count; ++i) {
        // the next 2 bytes are the AU_HEADER; the high 13 bits are the byte length of one AAC payload, the low 3 bits are unused
        uint16_t size = ((au_header_ptr[0] << 8) | au_header_ptr[1]) >> 3;
        if (ptr + size > end) {
            // not enough data
        auto size = ((au_header_ptr[0] << 8) | au_header_ptr[1]) >> 3;
        auto len = std::min<int>(size, end - ptr);
        if (len <= 0) {
            break;
        }
        _frame->_buffer.append((char *)ptr, len);
        ptr += len;
        au_header_ptr += 2;

        if (size) {
            // set the AAC payload
            _frame->_buffer.assign((char *) ptr, size);
        if (_frame->size() >= (size_t)size) {
            // set the timestamp of the current audio unit
            _frame->_dts = _last_dts + i * dts_inc;
            ptr += size;
            au_header_ptr += 2;
            flushData();
        }
    }
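For reference, the AU_HEADER layout this decoder relies on (RFC 3640 AAC-hbr: a 13-bit size plus a 3-bit index per 2-byte header) can be shown with a small standalone helper; this is a generic sketch, not the library's code:

    #include <cstdint>
    #include <vector>

    // Return the payload size announced by each 2-byte AU header.
    // `au_headers` points just past the 16-bit AU-headers-length field,
    // `count` is the number of AU headers in the packet.
    static std::vector<uint16_t> parse_au_sizes(const uint8_t *au_headers, size_t count) {
        std::vector<uint16_t> sizes;
        sizes.reserve(count);
        for (size_t i = 0; i < count; ++i) {
            // High 13 bits: AU size in bytes; low 3 bits: AU index / index delta (unused here).
            uint16_t header = (uint16_t)((au_headers[2 * i] << 8) | au_headers[2 * i + 1]);
            sizes.push_back((uint16_t)(header >> 3));
        }
        return sizes;
    }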
@@ -40,7 +40,7 @@ bool G711RtpEncoder::inputFrame(const Frame::Ptr &frame) {
    auto remain_size = len;
    size_t max_size = 160 * _channels * _pkt_dur_ms / 20; // 20 ms per 160 byte
    size_t n = 0;
    bool mark = true;
    bool mark = false;
    while (remain_size >= max_size) {
        assert(remain_size >= max_size);
        const size_t rtp_size = max_size;
@@ -117,7 +117,7 @@ size_t prefixSize(const char *ptr, size_t len) {
H264Track::H264Track(const string &sps, const string &pps, int sps_prefix_len, int pps_prefix_len) {
    _sps = sps.substr(sps_prefix_len);
    _pps = pps.substr(pps_prefix_len);
    update();
    H264Track::update();
}

CodecId H264Track::getCodecId() const {
@@ -238,6 +238,14 @@ bool H264Track::update() {
    return getAVCInfo(_sps, _width, _height, _fps);
}

std::vector<Frame::Ptr> H264Track::getConfigFrames() const {
    if (!ready()) {
        return {};
    }
    return { createConfigFrame<H264Frame>(_sps, 0, getIndex()),
             createConfigFrame<H264Frame>(_pps, 0, getIndex()) };
}

Track::Ptr H264Track::clone() const {
    return std::make_shared<H264Track>(*this);
}
@@ -284,23 +292,11 @@ bool H264Track::inputFrame_l(const Frame::Ptr &frame) {

void H264Track::insertConfigFrame(const Frame::Ptr &frame) {
    if (!_sps.empty()) {
        auto spsFrame = FrameImp::create<H264Frame>();
        spsFrame->_prefix_size = 4;
        spsFrame->_buffer.assign("\x00\x00\x00\x01", 4);
        spsFrame->_buffer.append(_sps);
        spsFrame->_dts = frame->dts();
        spsFrame->setIndex(frame->getIndex());
        VideoTrack::inputFrame(spsFrame);
        VideoTrack::inputFrame(createConfigFrame<H264Frame>(_sps, frame->dts(), frame->getIndex()));
    }

    if (!_pps.empty()) {
        auto ppsFrame = FrameImp::create<H264Frame>();
        ppsFrame->_prefix_size = 4;
        ppsFrame->_buffer.assign("\x00\x00\x00\x01", 4);
        ppsFrame->_buffer.append(_pps);
        ppsFrame->_dts = frame->dts();
        ppsFrame->setIndex(frame->getIndex());
        VideoTrack::inputFrame(ppsFrame);
        VideoTrack::inputFrame(createConfigFrame<H264Frame>(_pps, frame->dts(), frame->getIndex()));
    }
}
@@ -115,6 +115,7 @@ public:
    toolkit::Buffer::Ptr getExtraData() const override;
    void setExtraData(const uint8_t *data, size_t size) override;
    bool update() override;
    std::vector<Frame::Ptr> getConfigFrames() const override;

private:
    Sdp::Ptr getSdp(uint8_t payload_type) const override;
@@ -131,5 +132,17 @@ private:
    std::string _pps;
};

template <typename FrameType>
Frame::Ptr createConfigFrame(const std::string &data, uint64_t dts, int index) {
    auto frame = FrameImp::create<FrameType>();
    frame->_prefix_size = 4;
    frame->_buffer.assign("\x00\x00\x00\x01", 4);
    frame->_buffer.append(data);
    frame->_dts = dts;
    frame->setIndex(index);
    return frame;
}

}//namespace mediakit

#endif //ZLMEDIAKIT_H264_H
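As a usage sketch, a consumer can now re-emit the parameter sets before a key frame through the shared helper; `track` is assumed to be a ready H264Track::Ptr and `sink` any FrameWriterInterface-style consumer:

    // Each config frame is an Annex-B NALU with a 4-byte 00 00 00 01 prefix.
    for (auto &config : track->getConfigFrames()) {
        sink->inputFrame(config); // e.g. before writing an IDR frame for a new subscriber
    }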
@@ -66,7 +66,7 @@ H265Track::H265Track(const string &vps,const string &sps, const string &pps,int
    _vps = vps.substr(vps_prefix_len);
    _sps = sps.substr(sps_prefix_len);
    _pps = pps.substr(pps_prefix_len);
    update();
    H265Track::update();
}

CodecId H265Track::getCodecId() const {
@@ -185,6 +185,15 @@ bool H265Track::update() {
    return getHEVCInfo(_vps, _sps, _width, _height, _fps);
}

std::vector<Frame::Ptr> H265Track::getConfigFrames() const {
    if (!ready()) {
        return {};
    }
    return { createConfigFrame<H265Frame>(_vps, 0, getIndex()),
             createConfigFrame<H265Frame>(_sps, 0, getIndex()),
             createConfigFrame<H265Frame>(_pps, 0, getIndex()) };
}

Track::Ptr H265Track::clone() const {
    return std::make_shared<H265Track>(*this);
}
@@ -194,32 +203,13 @@ void H265Track::insertConfigFrame(const Frame::Ptr &frame) {
        return;
    }
    if (!_vps.empty()) {
        auto vpsFrame = FrameImp::create<H265Frame>();
        vpsFrame->_prefix_size = 4;
        vpsFrame->_buffer.assign("\x00\x00\x00\x01", 4);
        vpsFrame->_buffer.append(_vps);
        vpsFrame->_dts = frame->dts();
        vpsFrame->setIndex(frame->getIndex());
        VideoTrack::inputFrame(vpsFrame);
        VideoTrack::inputFrame(createConfigFrame<H265Frame>(_vps, frame->dts(), frame->getIndex()));
    }
    if (!_sps.empty()) {
        auto spsFrame = FrameImp::create<H265Frame>();
        spsFrame->_prefix_size = 4;
        spsFrame->_buffer.assign("\x00\x00\x00\x01", 4);
        spsFrame->_buffer.append(_sps);
        spsFrame->_dts = frame->dts();
        spsFrame->setIndex(frame->getIndex());
        VideoTrack::inputFrame(spsFrame);
        VideoTrack::inputFrame(createConfigFrame<H265Frame>(_sps, frame->dts(), frame->getIndex()));
    }

    if (!_pps.empty()) {
        auto ppsFrame = FrameImp::create<H265Frame>();
        ppsFrame->_prefix_size = 4;
        ppsFrame->_buffer.assign("\x00\x00\x00\x01", 4);
        ppsFrame->_buffer.append(_pps);
        ppsFrame->_dts = frame->dts();
        ppsFrame->setIndex(frame->getIndex());
        VideoTrack::inputFrame(ppsFrame);
        VideoTrack::inputFrame(createConfigFrame<H265Frame>(_pps, frame->dts(), frame->getIndex()));
    }
}
@@ -142,6 +142,7 @@ public:
    toolkit::Buffer::Ptr getExtraData() const override;
    void setExtraData(const uint8_t *data, size_t size) override;
    bool update() override;
    std::vector<Frame::Ptr> getConfigFrames() const override;

private:
    Sdp::Ptr getSdp(uint8_t payload_type) const override;
@@ -598,7 +598,7 @@ void JPEGRtpEncoder::rtpSendJpeg(const uint8_t *buf, int size, uint64_t pts, uin
{
    const uint8_t *qtables[4] = { NULL };
    int nb_qtables = 0;
    uint8_t w, h;
    uint8_t w { 0 }, h { 0 };
    uint8_t *p;
    int off = 0; /* fragment offset of the current JPEG frame */
    int len;
@@ -1476,6 +1476,16 @@
    "value": "{{ZLMediaKit_secret}}",
    "description": "API secret (configured in the config file)"
},
{
    "key": "vhost",
    "value": "{{defaultVhost}}",
    "description": "virtual host, e.g. __defaultVhost__"
},
{
    "key": "app",
    "value": "rtp",
    "description": "application name, e.g. rtp"
},
{
    "key": "stream_id",
    "value": "test",
@@ -1517,6 +1527,16 @@
    "value": "1",
    "description": "TCP mode: 0 = no TCP listening, 1 = enable TCP listening, 2 = TCP active-connect mode"
},
{
    "key": "vhost",
    "value": "{{defaultVhost}}",
    "description": "virtual host, e.g. __defaultVhost__"
},
{
    "key": "app",
    "value": "rtp",
    "description": "application name, e.g. rtp"
},
{
    "key": "stream_id",
    "value": "test",
@@ -1582,6 +1602,16 @@
    "value": "1",
    "description": "TCP mode: 0 = no TCP listening, 1 = enable TCP listening"
},
{
    "key": "vhost",
    "value": "{{defaultVhost}}",
    "description": "virtual host, e.g. __defaultVhost__"
},
{
    "key": "app",
    "value": "rtp",
    "description": "application name, e.g. rtp"
},
{
    "key": "stream_id",
    "value": "test",
@@ -1635,6 +1665,16 @@
    "value": "1",
    "description": "server port in TCP active mode"
},
{
    "key": "vhost",
    "value": "{{defaultVhost}}",
    "description": "virtual host, e.g. __defaultVhost__"
},
{
    "key": "app",
    "value": "rtp",
    "description": "application name, e.g. rtp"
},
{
    "key": "stream_id",
    "value": "test",
@@ -1666,6 +1706,16 @@
    "value": "{{ZLMediaKit_secret}}",
    "description": "API secret (configured in the config file)"
},
{
    "key": "vhost",
    "value": "{{defaultVhost}}",
    "description": "virtual host, e.g. __defaultVhost__"
},
{
    "key": "app",
    "value": "rtp",
    "description": "application name, e.g. rtp"
},
{
    "key": "stream_id",
    "value": "test",
@@ -1697,6 +1747,16 @@
    "value": "{{ZLMediaKit_secret}}",
    "description": "API secret (configured in the config file)"
},
{
    "key": "vhost",
    "value": "{{defaultVhost}}",
    "description": "virtual host, e.g. __defaultVhost__"
},
{
    "key": "app",
    "value": "rtp",
    "description": "application name, e.g. rtp"
},
{
    "key": "stream_id",
    "value": "test",
@@ -1733,6 +1793,16 @@
    "value": "{{ZLMediaKit_secret}}",
    "description": "API secret (configured in the config file)"
},
{
    "key": "vhost",
    "value": "{{defaultVhost}}",
    "description": "virtual host, e.g. __defaultVhost__"
},
{
    "key": "app",
    "value": "rtp",
    "description": "application name, e.g. rtp"
},
{
    "key": "stream_id",
    "value": "test",
@@ -1764,6 +1834,16 @@
    "value": "{{ZLMediaKit_secret}}",
    "description": "API secret (configured in the config file)"
},
{
    "key": "vhost",
    "value": "{{defaultVhost}}",
    "description": "virtual host, e.g. __defaultVhost__"
},
{
    "key": "app",
    "value": "rtp",
    "description": "application name, e.g. rtp"
},
{
    "key": "stream_id",
    "value": "test",
@ -23,18 +23,10 @@
|
||||
|
||||
INSTANCE_IMP(VideoStackManager)
|
||||
|
||||
Param::~Param()
|
||||
{
|
||||
VideoStackManager::Instance().unrefChannel(
|
||||
id, width, height, pixfmt);
|
||||
}
|
||||
Param::~Param() { VideoStackManager::Instance().unrefChannel(id, width, height, pixfmt); }
|
||||
|
||||
Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt)
|
||||
: _id(id)
|
||||
, _width(width)
|
||||
, _height(height)
|
||||
, _pixfmt(pixfmt)
|
||||
{
|
||||
: _id(id), _width(width), _height(height), _pixfmt(pixfmt) {
|
||||
_tmp = std::make_shared<mediakit::FFmpegFrame>();
|
||||
|
||||
_tmp->get()->width = _width;
|
||||
@ -53,64 +45,52 @@ Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pix
|
||||
_tmp = _sws->inputFrame(frame);
|
||||
}
|
||||
|
||||
void Channel::addParam(const std::weak_ptr<Param>& p)
|
||||
{
|
||||
void Channel::addParam(const std::weak_ptr<Param>& p) {
|
||||
std::lock_guard<std::recursive_mutex> lock(_mx);
|
||||
_params.push_back(p);
|
||||
}
|
||||
|
||||
void Channel::onFrame(const mediakit::FFmpegFrame::Ptr& frame)
|
||||
{
|
||||
void Channel::onFrame(const mediakit::FFmpegFrame::Ptr& frame) {
|
||||
std::weak_ptr<Channel> weakSelf = shared_from_this();
|
||||
_poller = _poller ? _poller : toolkit::WorkThreadPool::Instance().getPoller();
|
||||
_poller->async([weakSelf, frame]() {
|
||||
auto self = weakSelf.lock();
|
||||
if (!self) {
|
||||
return;
|
||||
}
|
||||
if (!self) { return; }
|
||||
self->_tmp = self->_sws->inputFrame(frame);
|
||||
|
||||
self->forEachParam([self](const Param::Ptr& p) { self->fillBuffer(p); });
|
||||
});
|
||||
}
|
||||
|
||||
void Channel::forEachParam(const std::function<void(const Param::Ptr&)>& func)
|
||||
{
|
||||
void Channel::forEachParam(const std::function<void(const Param::Ptr&)>& func) {
|
||||
for (auto& wp : _params) {
|
||||
if (auto sp = wp.lock()) {
|
||||
func(sp);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Channel::fillBuffer(const Param::Ptr& p)
|
||||
{
|
||||
if (auto buf = p->weak_buf.lock()) {
|
||||
copyData(buf, p);
|
||||
if (auto sp = wp.lock()) { func(sp); }
|
||||
}
|
||||
}
|
||||
|
||||
void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p)
|
||||
{
|
||||
void Channel::fillBuffer(const Param::Ptr& p) {
|
||||
if (auto buf = p->weak_buf.lock()) { copyData(buf, p); }
|
||||
}
|
||||
|
||||
void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p) {
|
||||
|
||||
switch (p->pixfmt) {
|
||||
case AV_PIX_FMT_YUV420P: {
|
||||
for (int i = 0; i < p->height; i++) {
|
||||
memcpy(buf->get()->data[0] + buf->get()->linesize[0] * (i + p->posY) + p->posX,
|
||||
_tmp->get()->data[0] + _tmp->get()->linesize[0] * i,
|
||||
_tmp->get()->width);
|
||||
_tmp->get()->data[0] + _tmp->get()->linesize[0] * i, _tmp->get()->width);
|
||||
}
|
||||
// 确保height为奇数时,也能正确的复制到最后一行uv数据
|
||||
for (int i = 0; i < (p->height + 1) / 2; i++) {
|
||||
// U平面
|
||||
memcpy(buf->get()->data[1] + buf->get()->linesize[1] * (i + p->posY / 2) + p->posX / 2,
|
||||
_tmp->get()->data[1] + _tmp->get()->linesize[1] * i,
|
||||
_tmp->get()->width / 2);
|
||||
memcpy(buf->get()->data[1] + buf->get()->linesize[1] * (i + p->posY / 2) +
|
||||
p->posX / 2,
|
||||
_tmp->get()->data[1] + _tmp->get()->linesize[1] * i, _tmp->get()->width / 2);
|
||||
|
||||
// V平面
|
||||
memcpy(buf->get()->data[2] + buf->get()->linesize[2] * (i + p->posY / 2) + p->posX / 2,
|
||||
_tmp->get()->data[2] + _tmp->get()->linesize[2] * i,
|
||||
_tmp->get()->width / 2);
|
||||
memcpy(buf->get()->data[2] + buf->get()->linesize[2] * (i + p->posY / 2) +
|
||||
p->posX / 2,
|
||||
_tmp->get()->data[2] + _tmp->get()->linesize[2] * i, _tmp->get()->width / 2);
|
||||
}
|
||||
break;
|
||||
}
|
||||
@ -119,19 +99,15 @@ void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr&
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
WarnL << "No support pixformat: " << av_get_pix_fmt_name(p->pixfmt);
|
||||
break;
|
||||
default: WarnL << "No support pixformat: " << av_get_pix_fmt_name(p->pixfmt); break;
|
||||
}
|
||||
}
|
||||
void StackPlayer::addChannel(const std::weak_ptr<Channel>& chn)
|
||||
{
|
||||
void StackPlayer::addChannel(const std::weak_ptr<Channel>& chn) {
|
||||
std::lock_guard<std::recursive_mutex> lock(_mx);
|
||||
_channels.push_back(chn);
|
||||
}
|
||||
|
||||
void StackPlayer::play()
|
||||
{
|
||||
void StackPlayer::play() {
|
||||
|
||||
auto url = _url;
|
||||
// 创建拉流 解码对象
|
||||
@ -146,13 +122,9 @@ void StackPlayer::play()
|
||||
_player->setOnPlayResult([weakPlayer, weakSelf, url](const toolkit::SockException& ex) mutable {
|
||||
TraceL << "StackPlayer: " << url << " OnPlayResult: " << ex.what();
|
||||
auto strongPlayer = weakPlayer.lock();
|
||||
if (!strongPlayer) {
|
||||
return;
|
||||
}
|
||||
if (!strongPlayer) { return; }
|
||||
auto self = weakSelf.lock();
|
||||
if (!self) {
|
||||
return;
|
||||
}
|
||||
if (!self) { return; }
|
||||
|
||||
if (!ex) {
|
||||
// 取消定时器
|
||||
@ -164,19 +136,18 @@ void StackPlayer::play()
|
||||
self->rePlay(url);
|
||||
}
|
||||
|
||||
auto videoTrack = std::dynamic_pointer_cast<mediakit::VideoTrack>(strongPlayer->getTrack(mediakit::TrackVideo, false));
|
||||
auto videoTrack = std::dynamic_pointer_cast<mediakit::VideoTrack>(
|
||||
strongPlayer->getTrack(mediakit::TrackVideo, false));
|
||||
// auto audioTrack = std::dynamic_pointer_cast<mediakit::AudioTrack>(strongPlayer->getTrack(mediakit::TrackAudio, false));
|
||||
|
||||
if (videoTrack) {
|
||||
// TODO:添加使用显卡还是cpu解码的判断逻辑
|
||||
//auto decoder = std::make_shared<FFmpegDecoder>(videoTrack, 1, std::vector<std::string>{ "hevc_cuvid", "h264_cuvid"});
|
||||
auto decoder = std::make_shared<mediakit::FFmpegDecoder>(videoTrack, 0, std::vector<std::string> { "h264", "hevc" });
|
||||
auto decoder = std::make_shared<mediakit::FFmpegDecoder>(
|
||||
videoTrack, 0, std::vector<std::string>{"h264", "hevc"});
|
||||
|
||||
decoder->setOnDecode([weakSelf](const mediakit::FFmpegFrame::Ptr& frame) mutable {
|
||||
auto self = weakSelf.lock();
|
||||
if (!self) {
|
||||
return;
|
||||
}
|
||||
if (!self) { return; }
|
||||
|
||||
self->onFrame(frame);
|
||||
});
|
||||
@ -190,14 +161,10 @@ void StackPlayer::play()
|
||||
_player->setOnShutdown([weakPlayer, url, weakSelf](const toolkit::SockException& ex) {
|
||||
TraceL << "StackPlayer: " << url << " OnShutdown: " << ex.what();
|
||||
auto strongPlayer = weakPlayer.lock();
|
||||
if (!strongPlayer) {
|
||||
return;
|
||||
}
|
||||
if (!strongPlayer) { return; }
|
||||
|
||||
auto self = weakSelf.lock();
|
||||
if (!self) {
|
||||
return;
|
||||
}
|
||||
if (!self) { return; }
|
||||
|
||||
self->onDisconnect();
|
||||
|
||||
@ -207,18 +174,14 @@ void StackPlayer::play()
|
||||
_player->play(url);
|
||||
}
|
||||
|
||||
void StackPlayer::onFrame(const mediakit::FFmpegFrame::Ptr& frame)
|
||||
{
|
||||
void StackPlayer::onFrame(const mediakit::FFmpegFrame::Ptr& frame) {
|
||||
std::lock_guard<std::recursive_mutex> lock(_mx);
|
||||
for (auto& weak_chn : _channels) {
|
||||
if (auto chn = weak_chn.lock()) {
|
||||
chn->onFrame(frame);
|
||||
}
|
||||
if (auto chn = weak_chn.lock()) { chn->onFrame(frame); }
|
||||
}
|
||||
}
|
||||
|
||||
void StackPlayer::onDisconnect()
|
||||
{
|
||||
void StackPlayer::onDisconnect() {
|
||||
std::lock_guard<std::recursive_mutex> lock(_mx);
|
||||
for (auto& weak_chn : _channels) {
|
||||
if (auto chn = weak_chn.lock()) {
|
||||
@ -228,31 +191,22 @@ void StackPlayer::onDisconnect()
|
||||
}
|
||||
}
|
||||
|
||||
void StackPlayer::rePlay(const std::string& url)
|
||||
{
|
||||
void StackPlayer::rePlay(const std::string& url) {
|
||||
_failedCount++;
|
||||
auto delay = MAX(2 * 1000, MIN(_failedCount * 3 * 1000, 60 * 1000));// 步进延迟 重试间隔
|
||||
std::weak_ptr<StackPlayer> weakSelf = shared_from_this();
|
||||
_timer = std::make_shared<toolkit::Timer>(
|
||||
delay / 1000.0f, [weakSelf, url]() {
|
||||
_timer = std::make_shared<toolkit::Timer>(delay / 1000.0f, [weakSelf, url]() {
|
||||
auto self = weakSelf.lock();
|
||||
if (!self) {
|
||||
}
|
||||
if (!self) {}
|
||||
WarnL << "replay [" << self->_failedCount << "]:" << url;
|
||||
self->_player->play(url);
|
||||
return false;
|
||||
},
|
||||
nullptr);
|
||||
}, nullptr);
|
||||
}
|
||||
|
||||
VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelFormat pixfmt, float fps, int bitRate)
|
||||
: _id(id)
|
||||
, _width(width)
|
||||
, _height(height)
|
||||
, _pixfmt(pixfmt)
|
||||
, _fps(fps)
|
||||
, _bitRate(bitRate)
|
||||
{
|
||||
VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelFormat pixfmt,
|
||||
float fps, int bitRate)
|
||||
: _id(id), _width(width), _height(height), _pixfmt(pixfmt), _fps(fps), _bitRate(bitRate) {
|
||||
|
||||
_buffer = std::make_shared<mediakit::FFmpegFrame>();
|
||||
|
||||
@ -262,7 +216,8 @@ VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelForm
|
||||
|
||||
av_frame_get_buffer(_buffer->get(), 32);
|
||||
|
||||
_dev = std::make_shared<mediakit::DevChannel>(mediakit::MediaTuple { DEFAULT_VHOST, "live", _id });
|
||||
_dev = std::make_shared<mediakit::DevChannel>(
|
||||
mediakit::MediaTuple{DEFAULT_VHOST, "live", _id, ""});
|
||||
|
||||
mediakit::VideoInfo info;
|
||||
info.codecId = mediakit::CodecH264;
|
||||
@ -278,28 +233,22 @@ VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelForm
|
||||
_isExit = false;
|
||||
}
|
||||
|
||||
VideoStack::~VideoStack()
|
||||
{
|
||||
VideoStack::~VideoStack() {
|
||||
_isExit = true;
|
||||
if (_thread.joinable()) {
|
||||
_thread.join();
|
||||
}
|
||||
if (_thread.joinable()) { _thread.join(); }
|
||||
}
|
||||
|
||||
void VideoStack::setParam(const Params& params)
|
||||
{
|
||||
void VideoStack::setParam(const Params& params) {
|
||||
if (_params) {
|
||||
for (auto& p : (*_params)) {
|
||||
if (!p)
|
||||
continue;
|
||||
if (!p) continue;
|
||||
p->weak_buf.reset();
|
||||
}
|
||||
}
|
||||
|
||||
initBgColor();
|
||||
for (auto& p : (*params)) {
|
||||
if (!p)
|
||||
continue;
|
||||
if (!p) continue;
|
||||
p->weak_buf = _buffer;
|
||||
if (auto chn = p->weak_chn.lock()) {
|
||||
chn->addParam(p);
|
||||
@ -309,14 +258,14 @@ void VideoStack::setParam(const Params& params)
|
||||
_params = params;
|
||||
}
|
||||
|
||||
void VideoStack::start()
|
||||
{
|
||||
void VideoStack::start() {
|
||||
_thread = std::thread([&]() {
|
||||
uint64_t pts = 0;
|
||||
int frameInterval = 1000 / _fps;
|
||||
auto lastEncTP = std::chrono::steady_clock::now();
|
||||
while (!_isExit) {
|
||||
if (std::chrono::steady_clock::now() - lastEncTP > std::chrono::milliseconds(frameInterval)) {
|
||||
if (std::chrono::steady_clock::now() - lastEncTP >
|
||||
std::chrono::milliseconds(frameInterval)) {
|
||||
lastEncTP = std::chrono::steady_clock::now();
|
||||
|
||||
_dev->inputYUV((char**)_buffer->get()->data, _buffer->get()->linesize, pts);
|
||||
@ -326,8 +275,7 @@ void VideoStack::start()
|
||||
});
|
||||
}
|
||||
|
||||
void VideoStack::initBgColor()
|
||||
{
|
||||
void VideoStack::initBgColor() {
|
||||
// 填充底色
|
||||
auto R = 20;
|
||||
auto G = 20;
|
||||
@ -342,27 +290,19 @@ void VideoStack::initBgColor()
|
||||
memset(_buffer->get()->data[2], V, _buffer->get()->linesize[2] * _height / 2);
|
||||
}
|
||||
|
||||
Channel::Ptr VideoStackManager::getChannel(const std::string& id,
|
||||
int width,
|
||||
int height,
|
||||
AVPixelFormat pixfmt)
|
||||
{
|
||||
Channel::Ptr VideoStackManager::getChannel(const std::string& id, int width, int height,
|
||||
AVPixelFormat pixfmt) {
|
||||
|
||||
std::lock_guard<std::recursive_mutex> lock(_mx);
|
||||
auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
|
||||
auto it = _channelMap.find(key);
|
||||
if (it != _channelMap.end()) {
|
||||
return it->second->acquire();
|
||||
}
|
||||
if (it != _channelMap.end()) { return it->second->acquire(); }
|
||||
|
||||
return createChannel(id, width, height, pixfmt);
|
||||
}
|
||||
|
||||
void VideoStackManager::unrefChannel(const std::string& id,
|
||||
int width,
|
||||
int height,
|
||||
AVPixelFormat pixfmt)
|
||||
{
|
||||
void VideoStackManager::unrefChannel(const std::string& id, int width, int height,
|
||||
AVPixelFormat pixfmt) {
|
||||
|
||||
std::lock_guard<std::recursive_mutex> lock(_mx);
|
||||
auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
|
||||
@ -377,8 +317,7 @@ void VideoStackManager::unrefChannel(const std::string& id,
|
||||
}
|
||||
}
|
||||
|
||||
int VideoStackManager::startVideoStack(const Json::Value& json)
|
||||
{
|
||||
int VideoStackManager::startVideoStack(const Json::Value& json) {
|
||||
|
||||
std::string id;
|
||||
int width, height;
|
||||
@ -392,8 +331,7 @@ int VideoStackManager::startVideoStack(const Json::Value& json)
|
||||
auto stack = std::make_shared<VideoStack>(id, width, height);
|
||||
|
||||
for (auto& p : (*params)) {
|
||||
if (!p)
|
||||
continue;
|
||||
if (!p) continue;
|
||||
p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
|
||||
}
|
||||
|
||||
@ -405,13 +343,13 @@ int VideoStackManager::startVideoStack(const Json::Value& json)
|
||||
return 0;
|
||||
}
|
||||
|
||||
int VideoStackManager::resetVideoStack(const Json::Value& json)
|
||||
{
|
||||
int VideoStackManager::resetVideoStack(const Json::Value& json) {
|
||||
std::string id;
|
||||
int width, height;
|
||||
auto params = parseParams(json, id, width, height);
|
||||
|
||||
if (!params) {
|
||||
ErrorL << "Videostack parse params failed!";
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -419,15 +357,12 @@ int VideoStackManager::resetVideoStack(const Json::Value& json)
|
||||
{
|
||||
std::lock_guard<std::recursive_mutex> lock(_mx);
|
||||
auto it = _stackMap.find(id);
|
||||
if (it == _stackMap.end()) {
|
||||
return -2;
|
||||
}
|
||||
if (it == _stackMap.end()) { return -2; }
|
||||
stack = it->second;
|
||||
}
|
||||
|
||||
for (auto& p : (*params)) {
|
||||
if (!p)
|
||||
continue;
|
||||
if (!p) continue;
|
||||
p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
|
||||
}
|
||||
|
||||
@ -435,8 +370,7 @@ int VideoStackManager::resetVideoStack(const Json::Value& json)
|
||||
return 0;
|
||||
}
|
||||
|
||||
int VideoStackManager::stopVideoStack(const std::string& id)
|
||||
{
|
||||
int VideoStackManager::stopVideoStack(const std::string& id) {
|
||||
std::lock_guard<std::recursive_mutex> lock(_mx);
|
||||
auto it = _stackMap.find(id);
|
||||
if (it != _stackMap.end()) {
|
||||
@ -447,24 +381,24 @@ int VideoStackManager::stopVideoStack(const std::string& id)
|
||||
return -1;
|
||||
}
|
||||
|
||||
mediakit::FFmpegFrame::Ptr VideoStackManager::getBgImg()
|
||||
{
|
||||
return _bgImg;
|
||||
mediakit::FFmpegFrame::Ptr VideoStackManager::getBgImg() { return _bgImg; }
|
||||
|
||||
template<typename T> T getJsonValue(const Json::Value& json, const std::string& key) {
|
||||
if (!json.isMember(key)) {
|
||||
throw Json::LogicError("VideoStack parseParams missing required field: " + key);
|
||||
}
|
||||
return json[key].as<T>();
|
||||
}
|
||||
|
||||
Params VideoStackManager::parseParams(const Json::Value& json,
|
||||
std::string& id,
|
||||
int& width,
|
||||
int& height)
|
||||
{
|
||||
try {
|
||||
id = json["id"].asString();
|
||||
Params VideoStackManager::parseParams(const Json::Value& json, std::string& id, int& width,
|
||||
int& height) {
|
||||
|
||||
width = json["width"].asInt();
|
||||
height = json["height"].asInt();
|
||||
id = getJsonValue<std::string>(json, "id");
|
||||
width = getJsonValue<int>(json, "width");
|
||||
height = getJsonValue<int>(json, "height");
|
||||
int rows = getJsonValue<int>(json, "row");// 行数
|
||||
int cols = getJsonValue<int>(json, "col");// 列数
|
||||
|
||||
int rows = json["row"].asInt(); //堆叠行数
|
||||
int cols = json["col"].asInt(); //堆叠列数
|
||||
float gapv = json["gapv"].asFloat();// 垂直间距
|
||||
float gaph = json["gaph"].asFloat();// 水平间距
|
||||
|
||||
@ -494,30 +428,32 @@ Params VideoStackManager::parseParams(const Json::Value& json,
|
||||
}
|
||||
|
||||
// 判断是否需要合并格子 (焦点屏)
|
||||
if (!json["span"].empty() && json.isMember("span")) {
|
||||
if (json.isMember("span") && json["span"].isArray() && json["span"].size() > 0) {
|
||||
for (const auto& subArray : json["span"]) {
|
||||
if (!subArray.isArray() || subArray.size() != 2) {
|
||||
throw Json::LogicError("Incorrect 'span' sub-array format in JSON");
|
||||
}
|
||||
std::array<int, 4> mergePos;
|
||||
int index = 0;
|
||||
unsigned int index = 0;
|
||||
|
||||
for (const auto& innerArray : subArray) {
|
||||
if (!innerArray.isArray() || innerArray.size() != 2) {
|
||||
throw Json::LogicError("Incorrect 'span' inner-array format in JSON");
|
||||
}
|
||||
for (const auto& number : innerArray) {
|
||||
if (index < mergePos.size()) {
|
||||
mergePos[index++] = number.asInt();
|
||||
}
|
||||
if (index < mergePos.size()) { mergePos[index++] = number.asInt(); }
|
||||
}
|
||||
}
|
||||
|
||||
for (int i = mergePos[0]; i <= mergePos[2]; i++) {
|
||||
for (int j = mergePos[1]; j <= mergePos[3]; j++) {
|
||||
if (i == mergePos[0] && j == mergePos[1]) {
|
||||
(*params)[i * cols + j]->width = (mergePos[3] - mergePos[1] + 1) * gridWidth + (mergePos[3] - mergePos[1]) * gapvPix;
|
||||
(*params)[i * cols + j]->height = (mergePos[2] - mergePos[0] + 1) * gridHeight + (mergePos[2] - mergePos[0]) * gaphPix;
|
||||
(*params)[i * cols + j]->width =
|
||||
(mergePos[3] - mergePos[1] + 1) * gridWidth +
|
||||
(mergePos[3] - mergePos[1]) * gapvPix;
|
||||
(*params)[i * cols + j]->height =
|
||||
(mergePos[2] - mergePos[0] + 1) * gridHeight +
|
||||
(mergePos[2] - mergePos[0]) * gaphPix;
|
||||
} else {
|
||||
(*params)[i * cols + j] = nullptr;
|
||||
}
|
||||
@ -526,14 +462,9 @@ Params VideoStackManager::parseParams(const Json::Value& json,
|
||||
}
|
||||
}
|
||||
return params;
|
||||
} catch (const std::exception& e) {
|
||||
ErrorL << "Videostack parse params failed! " << e.what();
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
bool VideoStackManager::loadBgImg(const std::string& path)
|
||||
{
|
||||
bool VideoStackManager::loadBgImg(const std::string& path) {
|
||||
_bgImg = std::make_shared<mediakit::FFmpegFrame>();
|
||||
|
||||
_bgImg->get()->width = 1280;
|
||||
@ -543,21 +474,21 @@ bool VideoStackManager::loadBgImg(const std::string& path)
|
||||
av_frame_get_buffer(_bgImg->get(), 32);
|
||||
|
||||
std::ifstream file(path, std::ios::binary);
|
||||
if (!file.is_open()) {
|
||||
return false;
|
||||
}
|
||||
if (!file.is_open()) { return false; }
|
||||
|
||||
file.read((char*)_bgImg->get()->data[0], _bgImg->get()->linesize[0] * _bgImg->get()->height); // Y
|
||||
file.read((char*)_bgImg->get()->data[1], _bgImg->get()->linesize[1] * _bgImg->get()->height / 2); // U
|
||||
file.read((char*)_bgImg->get()->data[2], _bgImg->get()->linesize[2] * _bgImg->get()->height / 2); // V
|
||||
file.read((char*)_bgImg->get()->data[0],
|
||||
_bgImg->get()->linesize[0] * _bgImg->get()->height);// Y
|
||||
file.read((char*)_bgImg->get()->data[1],
|
||||
_bgImg->get()->linesize[1] * _bgImg->get()->height / 2);// U
|
||||
file.read((char*)_bgImg->get()->data[2],
|
||||
_bgImg->get()->linesize[2] * _bgImg->get()->height / 2);// V
|
||||
return true;
|
||||
}
|
||||
|
||||
Channel::Ptr VideoStackManager::createChannel(const std::string& id,
|
||||
int width,
|
||||
int height,
|
||||
AVPixelFormat pixfmt)
|
||||
{
|
||||
void VideoStackManager::clear() { _stackMap.clear(); }
|
||||
|
||||
Channel::Ptr VideoStackManager::createChannel(const std::string& id, int width, int height,
|
||||
AVPixelFormat pixfmt) {
|
||||
|
||||
std::lock_guard<std::recursive_mutex> lock(_mx);
|
||||
StackPlayer::Ptr player;
|
||||
@ -568,24 +499,24 @@ Channel::Ptr VideoStackManager::createChannel(const std::string& id,
|
||||
player = createPlayer(id);
|
||||
}
|
||||
|
||||
auto refChn = std::make_shared<RefWrapper<Channel::Ptr>>(std::make_shared<Channel>(id, width, height, pixfmt));
|
||||
auto refChn = std::make_shared<RefWrapper<Channel::Ptr>>(
|
||||
std::make_shared<Channel>(id, width, height, pixfmt));
|
||||
auto chn = refChn->acquire();
|
||||
player->addChannel(chn);
|
||||
|
||||
_channelMap[id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt)] = refChn;
|
||||
_channelMap[id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt)] =
|
||||
refChn;
|
||||
return chn;
|
||||
}
|
||||
|
||||
StackPlayer::Ptr VideoStackManager::createPlayer(const std::string& id)
|
||||
{
|
||||
StackPlayer::Ptr VideoStackManager::createPlayer(const std::string& id) {
|
||||
std::lock_guard<std::recursive_mutex> lock(_mx);
|
||||
auto refPlayer = std::make_shared<RefWrapper<StackPlayer::Ptr>>(std::make_shared<StackPlayer>(id));
|
||||
auto refPlayer =
|
||||
std::make_shared<RefWrapper<StackPlayer::Ptr>>(std::make_shared<StackPlayer>(id));
|
||||
_playerMap[id] = refPlayer;
|
||||
|
||||
auto player = refPlayer->acquire();
|
||||
if (!id.empty()) {
|
||||
player->play();
|
||||
}
|
||||
if (!id.empty()) { player->play(); }
|
||||
|
||||
return player;
|
||||
}
|
||||
|
@ -5,20 +5,14 @@
|
||||
#include "Player/MediaPlayer.h"
|
||||
#include "json/json.h"
|
||||
#include <mutex>
|
||||
template <typename T>
|
||||
class RefWrapper {
|
||||
template<typename T> class RefWrapper {
|
||||
public:
|
||||
using Ptr = std::shared_ptr<RefWrapper<T>>;
|
||||
|
||||
template<typename... Args>
|
||||
explicit RefWrapper(Args&&... args)
|
||||
: _rc(0)
|
||||
, _entity(std::forward<Args>(args)...)
|
||||
{
|
||||
}
|
||||
explicit RefWrapper(Args&&... args) : _rc(0), _entity(std::forward<Args>(args)...) {}
|
||||
|
||||
T acquire()
|
||||
{
|
||||
T acquire() {
|
||||
++_rc;
|
||||
return _entity;
|
||||
}
|
||||
@ -26,8 +20,8 @@ class RefWrapper {
|
||||
bool dispose() { return --_rc <= 0; }
|
||||
|
||||
private:
|
||||
T _entity;
|
||||
std::atomic<int> _rc;
|
||||
T _entity;
|
||||
};
|
||||
|
||||
class Channel;
|
||||
@@ -87,10 +81,7 @@ class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
public:
    using Ptr = std::shared_ptr<StackPlayer>;

    StackPlayer(const std::string& url)
        : _url(url)
    {
    }
    StackPlayer(const std::string& url) : _url(url) {}

    void addChannel(const std::weak_ptr<Channel>& chn);

@@ -119,11 +110,8 @@ class VideoStack {
public:
    using Ptr = std::shared_ptr<VideoStack>;

    VideoStack(const std::string& url,
               int width = 1920,
               int height = 1080,
               AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P,
               float fps = 25.0,
    VideoStack(const std::string& url, int width = 1920, int height = 1080,
               AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P, float fps = 25.0,
               int bitRate = 2 * 1024 * 1024);

    ~VideoStack();
@@ -157,39 +145,33 @@ class VideoStack {

class VideoStackManager {
public:
    static VideoStackManager& Instance();

    Channel::Ptr getChannel(const std::string& id,
                            int width,
                            int height,
                            AVPixelFormat pixfmt);

    void unrefChannel(const std::string& id,
                      int width,
                      int height,
                      AVPixelFormat pixfmt);

    // Create a stitched (video stack) stream
    int startVideoStack(const Json::Value& json);

    int resetVideoStack(const Json::Value& json);

    // Stop a stitched stream
    int stopVideoStack(const std::string& id);

    // The stack layout can be changed without interrupting the stream (to switch what the stitched screen shows)
    int resetVideoStack(const Json::Value& json);

public:
    static VideoStackManager& Instance();

    Channel::Ptr getChannel(const std::string& id, int width, int height, AVPixelFormat pixfmt);

    void unrefChannel(const std::string& id, int width, int height, AVPixelFormat pixfmt);

    bool loadBgImg(const std::string& path);

    void clear();

    mediakit::FFmpegFrame::Ptr getBgImg();

protected:
    Params parseParams(const Json::Value& json,
                       std::string& id,
                       int& width,
                       int& height);
    Params parseParams(const Json::Value& json, std::string& id, int& width, int& height);

protected:
    Channel::Ptr createChannel(const std::string& id,
                               int width,
                               int height,
                               AVPixelFormat pixfmt);
    Channel::Ptr createChannel(const std::string& id, int width, int height, AVPixelFormat pixfmt);

    StackPlayer::Ptr createPlayer(const std::string& id);

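The public API above pairs getChannel with unrefChannel: a caller releases with exactly the (id, width, height, pixfmt) it acquired with, so the reference count on the cached channel stays balanced. A hedged usage sketch, assuming the declarations above are in scope:

// Sketch only: relies on VideoStackManager and Channel as declared above.
void useChannelOnce(const std::string &id) {
    auto &mgr = VideoStackManager::Instance();
    auto chn = mgr.getChannel(id, 1920, 1080, AV_PIX_FMT_YUV420P);
    if (!chn) {
        return;
    }
    // ... push decoded frames through the channel here ...
    mgr.unrefChannel(id, 1920, 1080, AV_PIX_FMT_YUV420P);
}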
@@ -8,6 +8,7 @@
 * may be found in the AUTHORS file in the root of the source tree.
 */

#include <exception>
#include <sys/stat.h>
#include <math.h>
#include <signal.h>
@@ -375,9 +376,6 @@ static ServiceController<FFmpegSource> s_ffmpeg_src;
static ServiceController<RtpServer> s_rtp_server;
#endif

static inline string getProxyKey(const string &vhost, const string &app, const string &stream) {
    return vhost + "/" + app + "/" + stream;
}

static inline string getPusherKey(const string &schema, const string &vhost, const string &app, const string &stream,
                                  const string &dst_url) {
@@ -476,18 +474,19 @@ Value makeMediaSourceJson(MediaSource &media){
}

#if defined(ENABLE_RTPPROXY)
uint16_t openRtpServer(uint16_t local_port, const string &stream_id, int tcp_mode, const string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
    if (s_rtp_server.find(stream_id)) {
        // To avoid confusion over RtpProcess ownership, adding the same stream_id twice is not allowed
uint16_t openRtpServer(uint16_t local_port, const mediakit::MediaTuple &tuple, int tcp_mode, const string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
    auto key = tuple.shortUrl();
    if (s_rtp_server.find(key)) {
        // To avoid confusion over RtpProcess ownership, adding the same key twice is not allowed
        return 0;
    }

    auto server = s_rtp_server.makeWithAction(stream_id, [&](RtpServer::Ptr server) {
        server->start(local_port, stream_id, (RtpServer::TcpMode)tcp_mode, local_ip.c_str(), re_use_port, ssrc, only_track, multiplex);
    auto server = s_rtp_server.makeWithAction(key, [&](RtpServer::Ptr server) {
        server->start(local_port, tuple, (RtpServer::TcpMode)tcp_mode, local_ip.c_str(), re_use_port, ssrc, only_track, multiplex);
    });
    server->setOnDetach([stream_id](const SockException &ex) {
    server->setOnDetach([key](const SockException &ex) {
        // Remove the server when the rtp stream times out
        s_rtp_server.erase(stream_id);
        s_rtp_server.erase(key);
    });

    // Reply with json
@@ -580,17 +579,17 @@ void getStatisticJson(const function<void(Value &val)> &cb) {
#endif
}

void addStreamProxy(const string &vhost, const string &app, const string &stream, const string &url, int retry_count,
void addStreamProxy(const MediaTuple &tuple, const string &url, int retry_count,
                    const ProtocolOption &option, int rtp_type, float timeout_sec, const mINI &args,
                    const function<void(const SockException &ex, const string &key)> &cb) {
    auto key = getProxyKey(vhost, app, stream);
    auto key = tuple.shortUrl();
    if (s_player_proxy.find(key)) {
        // Already pulling this stream
        cb(SockException(Err_other, "This stream already exists"), key);
        return;
    }
    // Add the pull-stream proxy
    auto player = s_player_proxy.make(key, vhost, app, stream, option, retry_count);
    auto player = s_player_proxy.make(key, tuple, option, retry_count);

    // Copy the extra parameters straight through first
    for (auto &pr : args) {
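The openRtpServer rework above keys the server registry by tuple.shortUrl() instead of the bare stream_id; judging by the listRtpServer change further down, that key is the vhost/app/stream triple joined with slashes. A hedged sketch of a caller using the new signature (the stream name "test" is only an example, and the tcp_mode value follows this codebase's RtpServer::TcpMode where 1 is the passive mode):

#if defined(ENABLE_RTPPROXY)
// Sketch: open an RTP server for __defaultVhost__/rtp/test on a random (even) port.
static uint16_t demoOpenRtpServer() {
    mediakit::MediaTuple tuple { DEFAULT_VHOST, kRtpAppName, "test", "" };
    return openRtpServer(0 /*local_port: pick one*/, tuple, 1 /*tcp_mode: passive*/, "::",
                         true /*re_use_port*/, 0 /*ssrc: accept any*/, 0 /*only_track: all*/,
                         false /*multiplex*/);
}
#endif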
@@ -1100,9 +1099,13 @@ void installWebApi() {

        ProtocolOption option(allArgs);
        auto retry_count = allArgs["retry_count"].empty()? -1: allArgs["retry_count"].as<int>();
        addStreamProxy(allArgs["vhost"],
                       allArgs["app"],
                       allArgs["stream"],

        std::string vhost = DEFAULT_VHOST;
        if (!allArgs["vhost"].empty()) {
            vhost = allArgs["vhost"];
        }
        auto tuple = MediaTuple { vhost, allArgs["app"], allArgs["stream"], "" };
        addStreamProxy(tuple,
                       allArgs["url"],
                       retry_count,
                       option,
@@ -1198,7 +1201,15 @@ void installWebApi() {
    api_regist("/index/api/getRtpInfo",[](API_ARGS_MAP){
        CHECK_SECRET();
        CHECK_ARGS("stream_id");
        auto src = MediaSource::find(DEFAULT_VHOST, kRtpAppName, allArgs["stream_id"]);
        std::string vhost = DEFAULT_VHOST;
        if (!allArgs["vhost"].empty()) {
            vhost = allArgs["vhost"];
        }
        std::string app = kRtpAppName;
        if (!allArgs["app"].empty()) {
            app = allArgs["app"];
        }
        auto src = MediaSource::find(vhost, app, allArgs["stream_id"]);
        auto process = src ? src->getRtpProcess() : nullptr;
        if (!process) {
            val["exist"] = false;
@@ -1211,7 +1222,16 @@ void installWebApi() {
    api_regist("/index/api/openRtpServer",[](API_ARGS_MAP){
        CHECK_SECRET();
        CHECK_ARGS("port", "stream_id");
        std::string vhost = DEFAULT_VHOST;
        if (!allArgs["vhost"].empty()) {
            vhost = allArgs["vhost"];
        }
        std::string app = kRtpAppName;
        if (!allArgs["app"].empty()) {
            app = allArgs["app"];
        }
        auto stream_id = allArgs["stream_id"];
        auto tuple = MediaTuple { vhost, app, stream_id, "" };
        auto tcp_mode = allArgs["tcp_mode"].as<int>();
        if (allArgs["enable_tcp"].as<int>() && !tcp_mode) {
            // Compatibility with old requests: the new version drops enable_tcp in favour of the tcp_mode parameter
@@ -1226,10 +1246,10 @@ void installWebApi() {
        if (!allArgs["local_ip"].empty()) {
            local_ip = allArgs["local_ip"];
        }
        auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, local_ip, allArgs["re_use_port"].as<bool>(),
        auto port = openRtpServer(allArgs["port"], tuple, tcp_mode, local_ip, allArgs["re_use_port"].as<bool>(),
                                  allArgs["ssrc"].as<uint32_t>(), only_track);
        if (port == 0) {
            throw InvalidArgsException("该stream_id已存在");
            throw InvalidArgsException("This stream already exists");
        }
        // Reply with json
        val["port"] = port;
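Every rtp-related handler above now repeats the same defaulting block: vhost falls back to DEFAULT_VHOST and app to kRtpAppName when the request omits them, and the result is packed into a MediaTuple. If one wanted to factor that out, a hypothetical helper (getRtpTuple is not part of this patch) could mirror those blocks:

// Hypothetical helper: builds the MediaTuple the handlers above construct inline.
template <typename Args>
static mediakit::MediaTuple getRtpTuple(const Args &allArgs) {
    std::string vhost = DEFAULT_VHOST;
    if (!allArgs["vhost"].empty()) {
        vhost = allArgs["vhost"];
    }
    std::string app = kRtpAppName;
    if (!allArgs["app"].empty()) {
        app = allArgs["app"];
    }
    auto stream_id = allArgs["stream_id"];
    return mediakit::MediaTuple { vhost, app, stream_id, "" };
}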
@@ -1238,7 +1258,16 @@ void installWebApi() {
    api_regist("/index/api/openRtpServerMultiplex", [](API_ARGS_MAP) {
        CHECK_SECRET();
        CHECK_ARGS("port", "stream_id");
        std::string vhost = DEFAULT_VHOST;
        if (!allArgs["vhost"].empty()) {
            vhost = allArgs["vhost"];
        }
        std::string app = kRtpAppName;
        if (!allArgs["app"].empty()) {
            app = allArgs["app"];
        }
        auto stream_id = allArgs["stream_id"];
        auto tuple = MediaTuple { vhost, app, stream_id, "" };
        auto tcp_mode = allArgs["tcp_mode"].as<int>();
        if (allArgs["enable_tcp"].as<int>() && !tcp_mode) {
            // Compatibility with old requests: the new version drops enable_tcp in favour of the tcp_mode parameter
@@ -1253,9 +1282,10 @@ void installWebApi() {
        if (!allArgs["local_ip"].empty()) {
            local_ip = allArgs["local_ip"];
        }
        auto port = openRtpServer(allArgs["port"], stream_id, tcp_mode, local_ip, true, 0, only_track,true);

        auto port = openRtpServer(allArgs["port"], tuple, tcp_mode, local_ip, true, 0, only_track, true);
        if (port == 0) {
            throw InvalidArgsException("该stream_id已存在");
            throw InvalidArgsException("This stream already exists");
        }
        // Reply with json
        val["port"] = port;
@@ -1272,9 +1302,19 @@ void installWebApi() {
            invoker(200, headerOut, val.toStyledString());
        };

        auto server = s_rtp_server.find(allArgs["stream_id"]);
        std::string vhost = DEFAULT_VHOST;
        if (!allArgs["vhost"].empty()) {
            vhost = allArgs["vhost"];
        }
        std::string app = kRtpAppName;
        if (!allArgs["app"].empty()) {
            app = allArgs["app"];
        }
        auto stream_id = allArgs["stream_id"];
        auto tuple = MediaTuple { vhost, app, stream_id, "" };
        auto server = s_rtp_server.find(tuple.shortUrl());
        if (!server) {
            cb(SockException(Err_other, "未找到rtp服务"));
            cb(SockException(Err_other, "can not find the stream"));
            return;
        }
        server->connectToServer(allArgs["dst_url"], allArgs["dst_port"], cb);
@@ -1284,7 +1324,17 @@
        CHECK_SECRET();
        CHECK_ARGS("stream_id");

        if(s_rtp_server.erase(allArgs["stream_id"]) == 0){
        std::string vhost = DEFAULT_VHOST;
        if (!allArgs["vhost"].empty()) {
            vhost = allArgs["vhost"];
        }
        std::string app = kRtpAppName;
        if (!allArgs["app"].empty()) {
            app = allArgs["app"];
        }
        auto stream_id = allArgs["stream_id"];
        auto tuple = MediaTuple { vhost, app, stream_id, "" };
        if (s_rtp_server.erase(tuple.shortUrl()) == 0) {
            val["hit"] = 0;
            return;
        }
@@ -1295,7 +1345,17 @@ void installWebApi() {
        CHECK_SECRET();
        CHECK_ARGS("stream_id", "ssrc");

        auto server = s_rtp_server.find(allArgs["stream_id"]);
        std::string vhost = DEFAULT_VHOST;
        if (!allArgs["vhost"].empty()) {
            vhost = allArgs["vhost"];
        }
        std::string app = kRtpAppName;
        if (!allArgs["app"].empty()) {
            app = allArgs["app"];
        }
        auto stream_id = allArgs["stream_id"];
        auto tuple = MediaTuple { vhost, app, stream_id, "" };
        auto server = s_rtp_server.find(tuple.shortUrl());
        if (!server) {
            throw ApiRetException("RtpServer not found by stream_id", API::NotFound);
        }
@@ -1307,8 +1367,11 @@ void installWebApi() {

        std::lock_guard<std::recursive_mutex> lck(s_rtp_server._mtx);
        for (auto &pr : s_rtp_server._map) {
            auto vec = split(pr.first, "/");
            Value obj;
            obj["stream_id"] = pr.first;
            obj["vhost"] = vec[0];
            obj["app"] = vec[1];
            obj["stream_id"] = vec[2];
            obj["port"] = pr.second->getPort();
            val["data"].append(obj);
        }
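listRtpServer above assumes the registry key is exactly three '/'-separated fields, which matches what shortUrl() appears to produce for vhost/app/stream (the removed getProxyKey built the same shape). A small standard-library sketch of that split, for readers without toolkit::split at hand:

#include <sstream>
#include <string>
#include <vector>

// Split a "vhost/app/stream" registry key back into its parts, as listRtpServer does.
static std::vector<std::string> splitKey(const std::string &key) {
    std::vector<std::string> parts;
    std::stringstream ss(key);
    std::string item;
    while (std::getline(ss, item, '/')) {
        parts.push_back(item);
    }
    return parts;
}
// e.g. splitKey("__defaultVhost__/rtp/test") -> {"__defaultVhost__", "rtp", "test"}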
@ -1437,8 +1500,16 @@ void installWebApi() {
|
||||
api_regist("/index/api/pauseRtpCheck", [](API_ARGS_MAP) {
|
||||
CHECK_SECRET();
|
||||
CHECK_ARGS("stream_id");
|
||||
std::string vhost = DEFAULT_VHOST;
|
||||
if (!allArgs["vhost"].empty()) {
|
||||
vhost = allArgs["vhost"];
|
||||
}
|
||||
std::string app = kRtpAppName;
|
||||
if (!allArgs["app"].empty()) {
|
||||
app = allArgs["app"];
|
||||
}
|
||||
//只是暂停流的检查,流媒体服务器做为流负载服务,收流就转发,RTSP/RTMP有自己暂停协议
|
||||
auto src = MediaSource::find(DEFAULT_VHOST, kRtpAppName, allArgs["stream_id"]);
|
||||
auto src = MediaSource::find(vhost, app, allArgs["stream_id"]);
|
||||
auto process = src ? src->getRtpProcess() : nullptr;
|
||||
if (process) {
|
||||
process->setStopCheckRtp(true);
|
||||
@ -1450,7 +1521,15 @@ void installWebApi() {
|
||||
api_regist("/index/api/resumeRtpCheck", [](API_ARGS_MAP) {
|
||||
CHECK_SECRET();
|
||||
CHECK_ARGS("stream_id");
|
||||
auto src = MediaSource::find(DEFAULT_VHOST, kRtpAppName, allArgs["stream_id"]);
|
||||
std::string vhost = DEFAULT_VHOST;
|
||||
if (!allArgs["vhost"].empty()) {
|
||||
vhost = allArgs["vhost"];
|
||||
}
|
||||
std::string app = kRtpAppName;
|
||||
if (!allArgs["app"].empty()) {
|
||||
app = allArgs["app"];
|
||||
}
|
||||
auto src = MediaSource::find(vhost, app, allArgs["stream_id"]);
|
||||
auto process = src ? src->getRtpProcess() : nullptr;
|
||||
if (process) {
|
||||
process->setStopCheckRtp(false);
|
||||
@ -1871,6 +1950,7 @@ void installWebApi() {
|
||||
});
|
||||
#endif
|
||||
|
||||
#if ENABLE_MP4
|
||||
api_regist("/index/api/loadMP4File", [](API_ARGS_MAP) {
|
||||
CHECK_SECRET();
|
||||
CHECK_ARGS("vhost", "app", "stream", "file_path");
|
||||
@ -1884,11 +1964,12 @@ void installWebApi() {
|
||||
option.load(allArgs);
|
||||
// 强制无人观看时自动关闭
|
||||
option.auto_close = true;
|
||||
|
||||
auto reader = std::make_shared<MP4Reader>(allArgs["vhost"], allArgs["app"], allArgs["stream"], allArgs["file_path"], option);
|
||||
auto tuple = MediaTuple{allArgs["vhost"], allArgs["app"], allArgs["stream"], ""};
|
||||
auto reader = std::make_shared<MP4Reader>(tuple, allArgs["file_path"], option);
|
||||
// sample_ms设置为0,从配置文件加载;file_repeat可以指定,如果配置文件也指定循环解复用,那么强制开启
|
||||
reader->startReadMP4(0, true, allArgs["file_repeat"]);
|
||||
});
|
||||
#endif
|
||||
|
||||
GET_CONFIG_FUNC(std::set<std::string>, download_roots, API::kDownloadRoot, [](const string &str) -> std::set<std::string> {
|
||||
std::set<std::string> ret;
|
||||
@ -1950,9 +2031,29 @@ void installWebApi() {
|
||||
|
||||
api_regist("/index/api/stack/start", [](API_ARGS_JSON_ASYNC) {
|
||||
CHECK_SECRET();
|
||||
auto ret = VideoStackManager::Instance().startVideoStack(allArgs.args);
|
||||
int ret = 0;
|
||||
try {
|
||||
ret = VideoStackManager::Instance().startVideoStack(allArgs.args);
|
||||
val["code"] = ret;
|
||||
val["msg"] = ret ? "failed" : "success";
|
||||
} catch (const std::exception &e) {
|
||||
val["code"] = -1;
|
||||
val["msg"] = e.what();
|
||||
}
|
||||
invoker(200, headerOut, val.toStyledString());
|
||||
});
|
||||
|
||||
api_regist("/index/api/stack/reset", [](API_ARGS_JSON_ASYNC) {
|
||||
CHECK_SECRET();
|
||||
int ret = 0;
|
||||
try {
|
||||
auto ret = VideoStackManager::Instance().resetVideoStack(allArgs.args);
|
||||
val["code"] = ret;
|
||||
val["msg"] = ret ? "failed" : "success";
|
||||
} catch (const std::exception &e) {
|
||||
val["code"] = -1;
|
||||
val["msg"] = e.what();
|
||||
}
|
||||
invoker(200, headerOut, val.toStyledString());
|
||||
});
|
||||
|
||||
@ -1974,6 +2075,9 @@ void unInstallWebApi(){
|
||||
#if defined(ENABLE_RTPPROXY)
|
||||
s_rtp_server.clear();
|
||||
#endif
|
||||
#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_FFMPEG) && defined(ENABLE_X264)
|
||||
VideoStackManager::Instance().clear();
|
||||
#endif
|
||||
|
||||
NoticeCenter::Instance().delListener(&web_api_tag);
|
||||
}
|
||||
|
@ -202,12 +202,12 @@ void installWebApi();
|
||||
void unInstallWebApi();
|
||||
|
||||
#if defined(ENABLE_RTPPROXY)
|
||||
uint16_t openRtpServer(uint16_t local_port, const std::string &stream_id, int tcp_mode, const std::string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex=false);
|
||||
uint16_t openRtpServer(uint16_t local_port, const mediakit::MediaTuple &tuple, int tcp_mode, const std::string &local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex=false);
|
||||
#endif
|
||||
|
||||
Json::Value makeMediaSourceJson(mediakit::MediaSource &media);
|
||||
void getStatisticJson(const std::function<void(Json::Value &val)> &cb);
|
||||
void addStreamProxy(const std::string &vhost, const std::string &app, const std::string &stream, const std::string &url, int retry_count,
|
||||
void addStreamProxy(const mediakit::MediaTuple &tuple, const std::string &url, int retry_count,
|
||||
const mediakit::ProtocolOption &option, int rtp_type, float timeout_sec, const toolkit::mINI &args,
|
||||
const std::function<void(const toolkit::SockException &ex, const std::string &key)> &cb);
|
||||
#endif //ZLMEDIAKIT_WEBAPI_H
|
||||
|
@ -301,7 +301,7 @@ static void pullStreamFromOrigin(const vector<string> &urls, size_t index, size_
|
||||
option.enable_hls = option.enable_hls || (args.schema == HLS_SCHEMA);
|
||||
option.enable_mp4 = false;
|
||||
|
||||
addStreamProxy(args.vhost, args.app, args.stream, url, retry_count, option, Rtsp::RTP_TCP, timeout_sec, mINI{}, [=](const SockException &ex, const string &key) mutable {
|
||||
addStreamProxy(args, url, retry_count, option, Rtsp::RTP_TCP, timeout_sec, mINI{}, [=](const SockException &ex, const string &key) mutable {
|
||||
if (!ex) {
|
||||
return;
|
||||
}
|
||||
@ -682,7 +682,9 @@ void installWebHook() {
|
||||
|
||||
ArgsType body;
|
||||
body["local_port"] = local_port;
|
||||
body["stream_id"] = stream_id;
|
||||
body[VHOST_KEY] = tuple.vhost;
|
||||
body["app"] = tuple.app;
|
||||
body["stream_id"] = tuple.stream;
|
||||
body["tcp_mode"] = tcp_mode;
|
||||
body["re_use_port"] = re_use_port;
|
||||
body["ssrc"] = ssrc;
|
||||
|
@ -55,59 +55,13 @@ string getOriginTypeString(MediaOriginType type){
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
ProtocolOption::ProtocolOption() {
|
||||
GET_CONFIG(int, s_modify_stamp, Protocol::kModifyStamp);
|
||||
GET_CONFIG(bool, s_enabel_audio, Protocol::kEnableAudio);
|
||||
GET_CONFIG(bool, s_add_mute_audio, Protocol::kAddMuteAudio);
|
||||
GET_CONFIG(bool, s_auto_close, Protocol::kAutoClose);
|
||||
GET_CONFIG(uint32_t, s_continue_push_ms, Protocol::kContinuePushMS);
|
||||
GET_CONFIG(uint32_t, s_paced_sender_ms, Protocol::kPacedSenderMS);
|
||||
|
||||
GET_CONFIG(bool, s_enable_hls, Protocol::kEnableHls);
|
||||
GET_CONFIG(bool, s_enable_hls_fmp4, Protocol::kEnableHlsFmp4);
|
||||
GET_CONFIG(bool, s_enable_mp4, Protocol::kEnableMP4);
|
||||
GET_CONFIG(bool, s_enable_rtsp, Protocol::kEnableRtsp);
|
||||
GET_CONFIG(bool, s_enable_rtmp, Protocol::kEnableRtmp);
|
||||
GET_CONFIG(bool, s_enable_ts, Protocol::kEnableTS);
|
||||
GET_CONFIG(bool, s_enable_fmp4, Protocol::kEnableFMP4);
|
||||
|
||||
GET_CONFIG(bool, s_hls_demand, Protocol::kHlsDemand);
|
||||
GET_CONFIG(bool, s_rtsp_demand, Protocol::kRtspDemand);
|
||||
GET_CONFIG(bool, s_rtmp_demand, Protocol::kRtmpDemand);
|
||||
GET_CONFIG(bool, s_ts_demand, Protocol::kTSDemand);
|
||||
GET_CONFIG(bool, s_fmp4_demand, Protocol::kFMP4Demand);
|
||||
|
||||
GET_CONFIG(bool, s_mp4_as_player, Protocol::kMP4AsPlayer);
|
||||
GET_CONFIG(uint32_t, s_mp4_max_second, Protocol::kMP4MaxSecond);
|
||||
GET_CONFIG(string, s_mp4_save_path, Protocol::kMP4SavePath);
|
||||
|
||||
GET_CONFIG(string, s_hls_save_path, Protocol::kHlsSavePath);
|
||||
|
||||
modify_stamp = s_modify_stamp;
|
||||
enable_audio = s_enabel_audio;
|
||||
add_mute_audio = s_add_mute_audio;
|
||||
auto_close = s_auto_close;
|
||||
continue_push_ms = s_continue_push_ms;
|
||||
paced_sender_ms = s_paced_sender_ms;
|
||||
|
||||
enable_hls = s_enable_hls;
|
||||
enable_hls_fmp4 = s_enable_hls_fmp4;
|
||||
enable_mp4 = s_enable_mp4;
|
||||
enable_rtsp = s_enable_rtsp;
|
||||
enable_rtmp = s_enable_rtmp;
|
||||
enable_ts = s_enable_ts;
|
||||
enable_fmp4 = s_enable_fmp4;
|
||||
|
||||
hls_demand = s_hls_demand;
|
||||
rtsp_demand = s_rtsp_demand;
|
||||
rtmp_demand = s_rtmp_demand;
|
||||
ts_demand = s_ts_demand;
|
||||
fmp4_demand = s_fmp4_demand;
|
||||
|
||||
mp4_as_player = s_mp4_as_player;
|
||||
mp4_max_second = s_mp4_max_second;
|
||||
mp4_save_path = s_mp4_save_path;
|
||||
|
||||
hls_save_path = s_hls_save_path;
|
||||
mINI ini;
|
||||
auto &config = mINI::Instance();
|
||||
static auto sz = strlen(Protocol::kFieldName);
|
||||
for (auto it = config.lower_bound(Protocol::kFieldName); it != config.end() && start_with(it->first, Protocol::kFieldName); ++it) {
|
||||
ini.emplace(it->first.substr(sz), it->second);
|
||||
}
|
||||
load(ini);
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
@ -641,7 +595,8 @@ MediaSource::Ptr MediaSource::createFromMP4(const string &schema, const string &
|
||||
}
|
||||
#ifdef ENABLE_MP4
|
||||
try {
|
||||
auto reader = std::make_shared<MP4Reader>(vhost, app, stream, file_path);
|
||||
MediaTuple tuple = {vhost, app, stream, ""};
|
||||
auto reader = std::make_shared<MP4Reader>(tuple, file_path);
|
||||
reader->startReadMP4();
|
||||
return MediaSource::find(schema, vhost, app, stream);
|
||||
} catch (std::exception &ex) {
|
||||
@ -711,7 +666,7 @@ string MediaSourceEvent::getOriginUrl(MediaSource &sender) const {
|
||||
MediaOriginType MediaSourceEventInterceptor::getOriginType(MediaSource &sender) const {
|
||||
auto listener = _listener.lock();
|
||||
if (!listener) {
|
||||
return MediaOriginType::unknown;
|
||||
return MediaSourceEvent::getOriginType(sender);
|
||||
}
|
||||
return listener->getOriginType(sender);
|
||||
}
|
||||
@ -731,7 +686,7 @@ string MediaSourceEventInterceptor::getOriginUrl(MediaSource &sender) const {
|
||||
std::shared_ptr<SockInfo> MediaSourceEventInterceptor::getOriginSock(MediaSource &sender) const {
|
||||
auto listener = _listener.lock();
|
||||
if (!listener) {
|
||||
return nullptr;
|
||||
return MediaSourceEvent::getOriginSock(sender);
|
||||
}
|
||||
return listener->getOriginSock(sender);
|
||||
}
|
||||
@ -739,7 +694,7 @@ std::shared_ptr<SockInfo> MediaSourceEventInterceptor::getOriginSock(MediaSource
|
||||
bool MediaSourceEventInterceptor::seekTo(MediaSource &sender, uint32_t stamp) {
|
||||
auto listener = _listener.lock();
|
||||
if (!listener) {
|
||||
return false;
|
||||
return MediaSourceEvent::seekTo(sender, stamp);
|
||||
}
|
||||
return listener->seekTo(sender, stamp);
|
||||
}
|
||||
@ -747,7 +702,7 @@ bool MediaSourceEventInterceptor::seekTo(MediaSource &sender, uint32_t stamp) {
|
||||
bool MediaSourceEventInterceptor::pause(MediaSource &sender, bool pause) {
|
||||
auto listener = _listener.lock();
|
||||
if (!listener) {
|
||||
return false;
|
||||
return MediaSourceEvent::pause(sender, pause);
|
||||
}
|
||||
return listener->pause(sender, pause);
|
||||
}
|
||||
@ -755,7 +710,7 @@ bool MediaSourceEventInterceptor::pause(MediaSource &sender, bool pause) {
|
||||
bool MediaSourceEventInterceptor::speed(MediaSource &sender, float speed) {
|
||||
auto listener = _listener.lock();
|
||||
if (!listener) {
|
||||
return false;
|
||||
return MediaSourceEvent::speed(sender, speed);
|
||||
}
|
||||
return listener->speed(sender, speed);
|
||||
}
|
||||
@ -763,7 +718,7 @@ bool MediaSourceEventInterceptor::speed(MediaSource &sender, float speed) {
|
||||
bool MediaSourceEventInterceptor::close(MediaSource &sender) {
|
||||
auto listener = _listener.lock();
|
||||
if (!listener) {
|
||||
return false;
|
||||
return MediaSourceEvent::close(sender);
|
||||
}
|
||||
return listener->close(sender);
|
||||
}
|
||||
@ -771,7 +726,7 @@ bool MediaSourceEventInterceptor::close(MediaSource &sender) {
|
||||
int MediaSourceEventInterceptor::totalReaderCount(MediaSource &sender) {
|
||||
auto listener = _listener.lock();
|
||||
if (!listener) {
|
||||
return sender.readerCount();
|
||||
return MediaSourceEvent::totalReaderCount(sender);
|
||||
}
|
||||
return listener->totalReaderCount(sender);
|
||||
}
|
||||
@ -779,49 +734,55 @@ int MediaSourceEventInterceptor::totalReaderCount(MediaSource &sender) {
|
||||
void MediaSourceEventInterceptor::onReaderChanged(MediaSource &sender, int size) {
|
||||
auto listener = _listener.lock();
|
||||
if (!listener) {
|
||||
MediaSourceEvent::onReaderChanged(sender, size);
|
||||
} else {
|
||||
listener->onReaderChanged(sender, size);
|
||||
return MediaSourceEvent::onReaderChanged(sender, size);
|
||||
}
|
||||
listener->onReaderChanged(sender, size);
|
||||
}
|
||||
|
||||
void MediaSourceEventInterceptor::onRegist(MediaSource &sender, bool regist) {
|
||||
auto listener = _listener.lock();
|
||||
if (listener) {
|
||||
listener->onRegist(sender, regist);
|
||||
if (!listener) {
|
||||
return MediaSourceEvent::onRegist(sender, regist);
|
||||
}
|
||||
listener->onRegist(sender, regist);
|
||||
}
|
||||
|
||||
float MediaSourceEventInterceptor::getLossRate(MediaSource &sender, TrackType type) {
|
||||
auto listener = _listener.lock();
|
||||
if (listener) {
|
||||
return listener->getLossRate(sender, type);
|
||||
if (!listener) {
|
||||
return MediaSourceEvent::getLossRate(sender, type);
|
||||
}
|
||||
return -1; //异常返回-1
|
||||
return listener->getLossRate(sender, type);
|
||||
}
|
||||
|
||||
toolkit::EventPoller::Ptr MediaSourceEventInterceptor::getOwnerPoller(MediaSource &sender) {
|
||||
auto listener = _listener.lock();
|
||||
if (listener) {
|
||||
return listener->getOwnerPoller(sender);
|
||||
if (!listener) {
|
||||
return MediaSourceEvent::getOwnerPoller(sender);
|
||||
}
|
||||
throw std::runtime_error(toolkit::demangle(typeid(*this).name()) + "::getOwnerPoller failed");
|
||||
return listener->getOwnerPoller(sender);
|
||||
}
|
||||
|
||||
std::shared_ptr<MultiMediaSourceMuxer> MediaSourceEventInterceptor::getMuxer(MediaSource &sender) const {
|
||||
auto listener = _listener.lock();
|
||||
return listener ? listener->getMuxer(sender) : nullptr;
|
||||
if (!listener) {
|
||||
return MediaSourceEvent::getMuxer(sender);
|
||||
}
|
||||
return listener->getMuxer(sender);
|
||||
}
|
||||
|
||||
std::shared_ptr<RtpProcess> MediaSourceEventInterceptor::getRtpProcess(MediaSource &sender) const {
|
||||
auto listener = _listener.lock();
|
||||
return listener ? listener->getRtpProcess(sender) : nullptr;
|
||||
if (!listener) {
|
||||
return MediaSourceEvent::getRtpProcess(sender);
|
||||
}
|
||||
return listener->getRtpProcess(sender);
|
||||
}
|
||||
|
||||
bool MediaSourceEventInterceptor::setupRecord(MediaSource &sender, Recorder::type type, bool start, const string &custom_path, size_t max_second) {
|
||||
auto listener = _listener.lock();
|
||||
if (!listener) {
|
||||
return false;
|
||||
return MediaSourceEvent::setupRecord(sender, type, start, custom_path, max_second);
|
||||
}
|
||||
return listener->setupRecord(sender, type, start, custom_path, max_second);
|
||||
}
|
||||
@ -829,7 +790,7 @@ bool MediaSourceEventInterceptor::setupRecord(MediaSource &sender, Recorder::typ
|
||||
bool MediaSourceEventInterceptor::isRecording(MediaSource &sender, Recorder::type type) {
|
||||
auto listener = _listener.lock();
|
||||
if (!listener) {
|
||||
return false;
|
||||
return MediaSourceEvent::isRecording(sender, type);
|
||||
}
|
||||
return listener->isRecording(sender, type);
|
||||
}
|
||||
@ -837,26 +798,25 @@ bool MediaSourceEventInterceptor::isRecording(MediaSource &sender, Recorder::typ
|
||||
vector<Track::Ptr> MediaSourceEventInterceptor::getMediaTracks(MediaSource &sender, bool trackReady) const {
|
||||
auto listener = _listener.lock();
|
||||
if (!listener) {
|
||||
return vector<Track::Ptr>();
|
||||
return MediaSourceEvent::getMediaTracks(sender, trackReady);
|
||||
}
|
||||
return listener->getMediaTracks(sender, trackReady);
|
||||
}
|
||||
|
||||
void MediaSourceEventInterceptor::startSendRtp(MediaSource &sender, const MediaSourceEvent::SendRtpArgs &args, const std::function<void(uint16_t, const toolkit::SockException &)> cb) {
|
||||
auto listener = _listener.lock();
|
||||
if (listener) {
|
||||
listener->startSendRtp(sender, args, cb);
|
||||
} else {
|
||||
MediaSourceEvent::startSendRtp(sender, args, cb);
|
||||
if (!listener) {
|
||||
return MediaSourceEvent::startSendRtp(sender, args, cb);
|
||||
}
|
||||
listener->startSendRtp(sender, args, cb);
|
||||
}
|
||||
|
||||
bool MediaSourceEventInterceptor::stopSendRtp(MediaSource &sender, const string &ssrc) {
|
||||
auto listener = _listener.lock();
|
||||
if (listener) {
|
||||
return listener->stopSendRtp(sender, ssrc);
|
||||
if (!listener) {
|
||||
return MediaSourceEvent::stopSendRtp(sender, ssrc);
|
||||
}
|
||||
return false;
|
||||
return listener->stopSendRtp(sender, ssrc);
|
||||
}
|
||||
|
||||
void MediaSourceEventInterceptor::setDelegate(const std::weak_ptr<MediaSourceEvent> &listener) {
|
||||
|
@ -15,6 +15,7 @@
|
||||
#include <atomic>
|
||||
#include <memory>
|
||||
#include <functional>
|
||||
#include "Util/mini.h"
|
||||
#include "Network/Socket.h"
|
||||
#include "Extension/Track.h"
|
||||
#include "Record/Recorder.h"
|
||||
@ -148,6 +149,14 @@ static void getArgsValue(const MAP &allArgs, const KEY &key, TYPE &value) {
|
||||
}
|
||||
}
|
||||
|
||||
template <typename KEY, typename TYPE>
|
||||
static void getArgsValue(const toolkit::mINI &allArgs, const KEY &key, TYPE &value) {
|
||||
auto it = allArgs.find(key);
|
||||
if (it != allArgs.end()) {
|
||||
value = (TYPE)it->second;
|
||||
}
|
||||
}
|
||||
|
||||
class ProtocolOption {
|
||||
public:
|
||||
ProtocolOption();
|
||||
|
@ -104,33 +104,32 @@ static onceToken token([]() {
|
||||
} // namespace General
|
||||
|
||||
namespace Protocol {
|
||||
#define PROTOCOL_FIELD "protocol."
|
||||
const string kModifyStamp = PROTOCOL_FIELD "modify_stamp";
|
||||
const string kEnableAudio = PROTOCOL_FIELD "enable_audio";
|
||||
const string kAddMuteAudio = PROTOCOL_FIELD "add_mute_audio";
|
||||
const string kAutoClose = PROTOCOL_FIELD "auto_close";
|
||||
const string kContinuePushMS = PROTOCOL_FIELD "continue_push_ms";
|
||||
const string kPacedSenderMS = PROTOCOL_FIELD "paced_sender_ms";
|
||||
const string kModifyStamp = string(kFieldName) + "modify_stamp";
|
||||
const string kEnableAudio = string(kFieldName) + "enable_audio";
|
||||
const string kAddMuteAudio = string(kFieldName) + "add_mute_audio";
|
||||
const string kAutoClose = string(kFieldName) + "auto_close";
|
||||
const string kContinuePushMS = string(kFieldName) + "continue_push_ms";
|
||||
const string kPacedSenderMS = string(kFieldName) + "paced_sender_ms";
|
||||
|
||||
const string kEnableHls = PROTOCOL_FIELD "enable_hls";
|
||||
const string kEnableHlsFmp4 = PROTOCOL_FIELD "enable_hls_fmp4";
|
||||
const string kEnableMP4 = PROTOCOL_FIELD "enable_mp4";
|
||||
const string kEnableRtsp = PROTOCOL_FIELD "enable_rtsp";
|
||||
const string kEnableRtmp = PROTOCOL_FIELD "enable_rtmp";
|
||||
const string kEnableTS = PROTOCOL_FIELD "enable_ts";
|
||||
const string kEnableFMP4 = PROTOCOL_FIELD "enable_fmp4";
|
||||
const string kEnableHls = string(kFieldName) + "enable_hls";
|
||||
const string kEnableHlsFmp4 = string(kFieldName) + "enable_hls_fmp4";
|
||||
const string kEnableMP4 = string(kFieldName) + "enable_mp4";
|
||||
const string kEnableRtsp = string(kFieldName) + "enable_rtsp";
|
||||
const string kEnableRtmp = string(kFieldName) + "enable_rtmp";
|
||||
const string kEnableTS = string(kFieldName) + "enable_ts";
|
||||
const string kEnableFMP4 = string(kFieldName) + "enable_fmp4";
|
||||
|
||||
const string kMP4AsPlayer = PROTOCOL_FIELD "mp4_as_player";
|
||||
const string kMP4MaxSecond = PROTOCOL_FIELD "mp4_max_second";
|
||||
const string kMP4SavePath = PROTOCOL_FIELD "mp4_save_path";
|
||||
const string kMP4AsPlayer = string(kFieldName) + "mp4_as_player";
|
||||
const string kMP4MaxSecond = string(kFieldName) + "mp4_max_second";
|
||||
const string kMP4SavePath = string(kFieldName) + "mp4_save_path";
|
||||
|
||||
const string kHlsSavePath = PROTOCOL_FIELD "hls_save_path";
|
||||
const string kHlsSavePath = string(kFieldName) + "hls_save_path";
|
||||
|
||||
const string kHlsDemand = PROTOCOL_FIELD "hls_demand";
|
||||
const string kRtspDemand = PROTOCOL_FIELD "rtsp_demand";
|
||||
const string kRtmpDemand = PROTOCOL_FIELD "rtmp_demand";
|
||||
const string kTSDemand = PROTOCOL_FIELD "ts_demand";
|
||||
const string kFMP4Demand = PROTOCOL_FIELD "fmp4_demand";
|
||||
const string kHlsDemand = string(kFieldName) + "hls_demand";
|
||||
const string kRtspDemand = string(kFieldName) + "rtsp_demand";
|
||||
const string kRtmpDemand = string(kFieldName) + "rtmp_demand";
|
||||
const string kTSDemand = string(kFieldName) + "ts_demand";
|
||||
const string kFMP4Demand = string(kFieldName) + "fmp4_demand";
|
||||
|
||||
static onceToken token([]() {
|
||||
mINI::Instance()[kModifyStamp] = (int)ProtocolOption::kModifyStampRelative;
|
||||
@ -375,6 +374,7 @@ const string kBenchmarkMode = "benchmark_mode";
|
||||
const string kWaitTrackReady = "wait_track_ready";
|
||||
const string kPlayTrack = "play_track";
|
||||
const string kProxyUrl = "proxy_url";
|
||||
const string kRtspSpeed = "rtsp_speed";
|
||||
} // namespace Client
|
||||
|
||||
} // namespace mediakit
|
||||
|
@ -107,7 +107,7 @@ extern const std::string kBroadcastReloadConfig;
|
||||
|
||||
// rtp server 超时
|
||||
extern const std::string kBroadcastRtpServerTimeout;
|
||||
#define BroadcastRtpServerTimeoutArgs uint16_t &local_port, const string &stream_id,int &tcp_mode, bool &re_use_port, uint32_t &ssrc
|
||||
#define BroadcastRtpServerTimeoutArgs uint16_t &local_port, const MediaTuple &tuple, int &tcp_mode, bool &re_use_port, uint32_t &ssrc
|
||||
|
||||
// rtc transport sctp 连接状态
|
||||
extern const std::string kBroadcastRtcSctpConnecting;
|
||||
@ -205,6 +205,7 @@ extern const std::string kBroadcastPlayerCountChanged;
|
||||
} // namespace General
|
||||
|
||||
namespace Protocol {
|
||||
static constexpr char kFieldName[] = "protocol.";
|
||||
//时间戳修复这一路流标志位
|
||||
extern const std::string kModifyStamp;
|
||||
//转协议是否开启音频
|
||||
@ -447,6 +448,8 @@ extern const std::string kWaitTrackReady;
|
||||
extern const std::string kPlayTrack;
|
||||
//设置代理url,目前只支持http协议
|
||||
extern const std::string kProxyUrl;
|
||||
//设置开始rtsp倍速播放
|
||||
extern const std::string kRtspSpeed;
|
||||
} // namespace Client
|
||||
} // namespace mediakit
|
||||
|
||||
|
@ -109,6 +109,11 @@ public:
|
||||
* 返回视频fps
|
||||
*/
|
||||
virtual float getVideoFps() const { return 0; }
|
||||
|
||||
/**
|
||||
* 返回相关 sps/pps 等
|
||||
*/
|
||||
virtual std::vector<Frame::Ptr> getConfigFrames() const { return std::vector<Frame::Ptr>{}; }
|
||||
};
|
||||
|
||||
class VideoTrackImp : public VideoTrack {
|
||||
|
@ -41,7 +41,13 @@ public:
|
||||
FMP4MediaSource(const MediaTuple& tuple,
|
||||
int ring_size = FMP4_GOP_SIZE) : MediaSource(FMP4_SCHEMA, tuple), _ring_size(ring_size) {}
|
||||
|
||||
~FMP4MediaSource() override { flush(); }
|
||||
~FMP4MediaSource() override {
|
||||
try {
|
||||
flush();
|
||||
} catch (std::exception &ex) {
|
||||
WarnL << ex.what();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取媒体源的环形缓冲
|
||||
|
@ -26,7 +26,13 @@ public:
|
||||
_media_src = std::make_shared<FMP4MediaSource>(tuple);
|
||||
}
|
||||
|
||||
~FMP4MediaSourceMuxer() override { MP4MuxerMemory::flush(); };
|
||||
~FMP4MediaSourceMuxer() override {
|
||||
try {
|
||||
MP4MuxerMemory::flush();
|
||||
} catch (std::exception &ex) {
|
||||
WarnL << ex.what();
|
||||
}
|
||||
}
|
||||
|
||||
void setListener(const std::weak_ptr<MediaSourceEvent> &listener){
|
||||
setDelegate(listener);
|
||||
|
@ -141,7 +141,9 @@ static std::shared_ptr<char> getSharedMmap(const string &file_path, int64_t &fil
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
file_size = ::GetFileSize(hfile, NULL);
|
||||
LARGE_INTEGER FileSize;
|
||||
GetFileSizeEx(hfile, &FileSize); //GetFileSize函数的拓展,可用于获取大于4G的文件大小
|
||||
file_size = FileSize.QuadPart;
|
||||
|
||||
auto hmapping = ::CreateFileMapping(hfile, NULL, PAGE_READONLY, 0, 0, NULL);
|
||||
|
||||
|
@ -24,7 +24,7 @@ namespace mediakit {
|
||||
PlayerBase::Ptr PlayerBase::createPlayer(const EventPoller::Ptr &in_poller, const string &url_in) {
|
||||
auto poller = in_poller ? in_poller : EventPollerPool::Instance().getPoller();
|
||||
std::weak_ptr<EventPoller> weak_poller = poller;
|
||||
static auto release_func = [weak_poller](PlayerBase *ptr) {
|
||||
auto release_func = [weak_poller](PlayerBase *ptr) {
|
||||
if (auto poller = weak_poller.lock()) {
|
||||
poller->async([ptr]() {
|
||||
onceToken token(nullptr, [&]() { delete ptr; });
|
||||
|
@ -24,13 +24,9 @@ using namespace std;
|
||||
namespace mediakit {
|
||||
|
||||
PlayerProxy::PlayerProxy(
|
||||
const string &vhost, const string &app, const string &stream_id, const ProtocolOption &option, int retry_count,
|
||||
const MediaTuple &tuple, const ProtocolOption &option, int retry_count,
|
||||
const EventPoller::Ptr &poller, int reconnect_delay_min, int reconnect_delay_max, int reconnect_delay_step)
|
||||
: MediaPlayer(poller)
|
||||
, _option(option) {
|
||||
_tuple.vhost = vhost;
|
||||
_tuple.app = app;
|
||||
_tuple.stream = stream_id;
|
||||
: MediaPlayer(poller), _tuple(tuple), _option(option) {
|
||||
_retry_count = retry_count;
|
||||
|
||||
setOnClose(nullptr);
|
||||
|
@ -66,9 +66,9 @@ public:
|
||||
|
||||
// 如果retry_count<0,则一直重试播放;否则重试retry_count次数
|
||||
// 默认一直重试
|
||||
PlayerProxy(
|
||||
const std::string &vhost, const std::string &app, const std::string &stream_id, const ProtocolOption &option, int retry_count = -1,
|
||||
const toolkit::EventPoller::Ptr &poller = nullptr, int reconnect_delay_min = 2, int reconnect_delay_max = 60, int reconnect_delay_step = 3);
|
||||
PlayerProxy(const MediaTuple &tuple, const ProtocolOption &option, int retry_count = -1,
|
||||
const toolkit::EventPoller::Ptr &poller = nullptr,
|
||||
int reconnect_delay_min = 2, int reconnect_delay_max = 60, int reconnect_delay_step = 3);
|
||||
|
||||
~PlayerProxy() override;
|
||||
|
||||
@ -129,12 +129,12 @@ private:
|
||||
void setTranslationInfo();
|
||||
|
||||
private:
|
||||
ProtocolOption _option;
|
||||
int _retry_count;
|
||||
int _reconnect_delay_min;
|
||||
int _reconnect_delay_max;
|
||||
int _reconnect_delay_step;
|
||||
MediaTuple _tuple;
|
||||
ProtocolOption _option;
|
||||
std::string _pull_url;
|
||||
toolkit::Timer::Ptr _timer;
|
||||
std::function<void()> _on_disconnect;
|
||||
|
@ -84,7 +84,13 @@ public:
|
||||
using Ptr = std::shared_ptr<HlsRecorder>;
|
||||
template <typename ...ARGS>
|
||||
HlsRecorder(ARGS && ...args) : HlsRecorderBase<MpegMuxer>(false, std::forward<ARGS>(args)...) {}
|
||||
~HlsRecorder() override { this->flush(); }
|
||||
~HlsRecorder() override {
|
||||
try {
|
||||
this->flush();
|
||||
} catch (std::exception &ex) {
|
||||
WarnL << ex.what();
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
void onWrite(std::shared_ptr<toolkit::Buffer> buffer, uint64_t timestamp, bool key_pos) override {
|
||||
@ -102,7 +108,13 @@ public:
|
||||
using Ptr = std::shared_ptr<HlsFMP4Recorder>;
|
||||
template <typename ...ARGS>
|
||||
HlsFMP4Recorder(ARGS && ...args) : HlsRecorderBase<MP4MuxerMemory>(true, std::forward<ARGS>(args)...) {}
|
||||
~HlsFMP4Recorder() override { this->flush(); }
|
||||
~HlsFMP4Recorder() override {
|
||||
try {
|
||||
this->flush();
|
||||
} catch (std::exception &ex) {
|
||||
WarnL << ex.what();
|
||||
}
|
||||
}
|
||||
|
||||
void addTrackCompleted() override {
|
||||
HlsRecorderBase<MP4MuxerMemory>::addTrackCompleted();
|
||||
|
@ -99,6 +99,20 @@ bool MP4MuxerInterface::inputFrame(const Frame::Ptr &frame) {
|
||||
_started = true;
|
||||
}
|
||||
|
||||
// fmp4封装超过一定I帧间隔,强制刷新segment,防止内存上涨
|
||||
if (frame->getTrackType() == TrackVideo && _mov_writter->fmp4) {
|
||||
if (frame->keyFrame()) {
|
||||
_non_iframe_video_count = 0;
|
||||
} else {
|
||||
_non_iframe_video_count++;
|
||||
}
|
||||
|
||||
if (_non_iframe_video_count > 200) {
|
||||
saveSegment();
|
||||
_non_iframe_video_count = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// mp4文件时间戳需要从0开始
|
||||
auto &track = it->second;
|
||||
switch (frame->getCodecId()) {
|
||||
@ -164,6 +178,7 @@ bool MP4MuxerInterface::addTrack(const Track::Ptr &track) {
|
||||
}
|
||||
_tracks[track->getIndex()].track_id = track_id;
|
||||
_have_video = true;
|
||||
_non_iframe_video_count = 0;
|
||||
} else if (track->getTrackType() == TrackAudio) {
|
||||
auto audio_track = dynamic_pointer_cast<AudioTrack>(track);
|
||||
CHECK(audio_track);
|
||||
|
@ -72,6 +72,7 @@ private:
|
||||
bool _started = false;
|
||||
bool _have_video = false;
|
||||
MP4FileIO::Writer _mov_writter;
|
||||
int _non_iframe_video_count; // 非I帧个数
|
||||
|
||||
class FrameMergerImp : public FrameMerger {
|
||||
public:
|
||||
|
@ -20,7 +20,7 @@ using namespace toolkit;
|
||||
|
||||
namespace mediakit {
|
||||
|
||||
MP4Reader::MP4Reader(const std::string &vhost, const std::string &app, const std::string &stream_id, const string &file_path,
|
||||
MP4Reader::MP4Reader(const MediaTuple &tuple, const string &file_path,
|
||||
toolkit::EventPoller::Ptr poller) {
|
||||
ProtocolOption option;
|
||||
// 读取mp4文件并流化时,不重复生成mp4/hls文件
|
||||
@ -29,16 +29,15 @@ MP4Reader::MP4Reader(const std::string &vhost, const std::string &app, const std
|
||||
option.enable_hls_fmp4 = false;
|
||||
// mp4支持多track
|
||||
option.max_track = 16;
|
||||
setup(vhost, app, stream_id, file_path, option, std::move(poller));
|
||||
setup(tuple, file_path, option, std::move(poller));
|
||||
}
|
||||
|
||||
MP4Reader::MP4Reader(const std::string &vhost, const std::string &app, const std::string &stream_id, const string &file_path, const ProtocolOption &option, toolkit::EventPoller::Ptr poller) {
|
||||
setup(vhost, app, stream_id, file_path, option, std::move(poller));
|
||||
MP4Reader::MP4Reader(const MediaTuple &tuple, const string &file_path, const ProtocolOption &option, toolkit::EventPoller::Ptr poller) {
|
||||
setup(tuple, file_path, option, std::move(poller));
|
||||
}
|
||||
|
||||
void MP4Reader::setup(const std::string &vhost, const std::string &app, const std::string &stream_id, const std::string &file_path, const ProtocolOption &option, toolkit::EventPoller::Ptr poller) {
|
||||
void MP4Reader::setup(const MediaTuple &tuple, const std::string &file_path, const ProtocolOption &option, toolkit::EventPoller::Ptr poller) {
|
||||
//读写文件建议放在后台线程
|
||||
auto tuple = MediaTuple{vhost, app, stream_id, ""};
|
||||
_poller = poller ? std::move(poller) : WorkThreadPool::Instance().getPoller();
|
||||
_file_path = file_path;
|
||||
if (_file_path.empty()) {
|
||||
|
@ -28,11 +28,9 @@ public:
|
||||
* @param stream_id 流id,置空时,只解复用mp4,但是不生成MediaSource
|
||||
* @param file_path 文件路径,如果为空则根据配置文件和上面参数自动生成,否则使用指定的文件
|
||||
*/
|
||||
MP4Reader(const std::string &vhost, const std::string &app, const std::string &stream_id,
|
||||
const std::string &file_path = "", toolkit::EventPoller::Ptr poller = nullptr);
|
||||
MP4Reader(const MediaTuple &tuple, const std::string &file_path = "", toolkit::EventPoller::Ptr poller = nullptr);
|
||||
|
||||
MP4Reader(const std::string &vhost, const std::string &app, const std::string &stream_id,
|
||||
const std::string &file_path, const ProtocolOption &option, toolkit::EventPoller::Ptr poller = nullptr);
|
||||
MP4Reader(const MediaTuple &tuple, const std::string &file_path, const ProtocolOption &option, toolkit::EventPoller::Ptr poller = nullptr);
|
||||
|
||||
/**
|
||||
* 开始解复用MP4文件
|
||||
@ -69,7 +67,7 @@ private:
|
||||
void setCurrentStamp(uint32_t stamp);
|
||||
bool seekTo(uint32_t stamp_seek);
|
||||
|
||||
void setup(const std::string &vhost, const std::string &app, const std::string &stream_id, const std::string &file_path, const ProtocolOption &option, toolkit::EventPoller::Ptr poller);
|
||||
void setup(const MediaTuple &tuple, const std::string &file_path, const ProtocolOption &option, toolkit::EventPoller::Ptr poller);
|
||||
|
||||
private:
|
||||
bool _file_repeat = false;
|
||||
|
@ -48,7 +48,13 @@ public:
|
||||
*/
|
||||
RtmpMediaSource(const MediaTuple& tuple, int ring_size = RTMP_GOP_SIZE): MediaSource(RTMP_SCHEMA, tuple), _ring_size(ring_size) {}
|
||||
|
||||
~RtmpMediaSource() override { flush(); }
|
||||
~RtmpMediaSource() override {
|
||||
try {
|
||||
flush();
|
||||
} catch (std::exception &ex) {
|
||||
WarnL << ex.what();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取媒体源的环形缓冲
|
||||
|
@ -85,14 +85,17 @@ DecoderImp::DecoderImp(const Decoder::Ptr &decoder, MediaSinkInterface *sink){
|
||||
#if defined(ENABLE_RTPPROXY) || defined(ENABLE_HLS)
|
||||
|
||||
void DecoderImp::onStream(int stream, int codecid, const void *extra, size_t bytes, int finish) {
|
||||
// G711传统只支持 8000/1/16的规格,FFmpeg貌似做了扩展,但是这里不管它了
|
||||
auto track = Factory::getTrackByCodecId(getCodecByMpegId(codecid), 8000, 1, 16);
|
||||
if (!track) {
|
||||
if (_finished) {
|
||||
return;
|
||||
}
|
||||
// G711传统只支持 8000/1/16的规格,FFmpeg貌似做了扩展,但是这里不管它了
|
||||
auto track = Factory::getTrackByCodecId(getCodecByMpegId(codecid), 8000, 1, 16);
|
||||
if (track) {
|
||||
onTrack(stream, std::move(track));
|
||||
}
|
||||
// 防止未获取视频track提前complete导致忽略后续视频的问题,用于兼容一些不太规范的ps流
|
||||
if (finish && _have_video) {
|
||||
_finished = true;
|
||||
_sink->addTrackCompleted();
|
||||
InfoL << "Add track finished";
|
||||
}
|
||||
|
@ -57,6 +57,7 @@ private:
|
||||
void onStream(int stream, int codecid, const void *extra, size_t bytes, int finish);
|
||||
|
||||
private:
|
||||
bool _finished = false;
|
||||
bool _have_video = false;
|
||||
Decoder::Ptr _decoder;
|
||||
MediaSinkInterface *_sink;
|
||||
|
@ -41,7 +41,7 @@ private:
|
||||
class RtpCachePS : public RtpCache, public PSEncoderImp {
|
||||
public:
|
||||
RtpCachePS(onFlushed cb, uint32_t ssrc, uint8_t payload_type = 96, bool ps_or_ts = true) :
|
||||
RtpCache(std::move(cb)), PSEncoderImp(ssrc, ps_or_ts ? payload_type : Rtsp::PT_MP2T, ps_or_ts) {};
|
||||
RtpCache(std::move(cb)), PSEncoderImp(ssrc, ps_or_ts ? payload_type : static_cast<int>(Rtsp::PT_MP2T), ps_or_ts) {};
|
||||
|
||||
void flush() override;
|
||||
|
||||
|
@ -18,22 +18,20 @@ using namespace std;
|
||||
using namespace toolkit;
|
||||
|
||||
//在创建_muxer对象前(也就是推流鉴权成功前),需要先缓存frame,这样可以防止丢包,提高体验
|
||||
//但是同时需要控制缓冲长度,防止内存溢出。200帧数据,大概有10秒数据,应该足矣等待鉴权hook返回
|
||||
static constexpr size_t kMaxCachedFrame = 200;
|
||||
//但是同时需要控制缓冲长度,防止内存溢出。最多缓存10秒数据,应该足矣等待鉴权hook返回
|
||||
static constexpr size_t kMaxCachedFrameMS = 10 * 1000;
|
||||
|
||||
namespace mediakit {
|
||||
|
||||
RtpProcess::Ptr RtpProcess::createProcess(std::string stream_id) {
|
||||
RtpProcess::Ptr ret(new RtpProcess(std::move(stream_id)));
|
||||
RtpProcess::Ptr RtpProcess::createProcess(const MediaTuple &tuple) {
|
||||
RtpProcess::Ptr ret(new RtpProcess(tuple));
|
||||
ret->createTimer();
|
||||
return ret;
|
||||
}
|
||||
|
||||
RtpProcess::RtpProcess(string stream_id) {
|
||||
_media_info.schema = kRtpAppName;
|
||||
_media_info.vhost = DEFAULT_VHOST;
|
||||
_media_info.app = kRtpAppName;
|
||||
_media_info.stream = std::move(stream_id);
|
||||
RtpProcess::RtpProcess(const MediaTuple &tuple) {
|
||||
_media_info.schema = "rtp";
|
||||
static_cast<MediaTuple &>(_media_info) = tuple;
|
||||
|
||||
GET_CONFIG(string, dump_dir, RtpProxy::kDumpDir);
|
||||
{
|
||||
@ -112,6 +110,7 @@ bool RtpProcess::inputRtp(bool is_udp, const Socket::Ptr &sock, const char *data
|
||||
_addr.reset(new sockaddr_storage(*((sockaddr_storage *)addr)));
|
||||
if (first) {
|
||||
emitOnPublish();
|
||||
_cache_ticker.resetTime();
|
||||
}
|
||||
}
|
||||
|
||||
@ -152,8 +151,8 @@ bool RtpProcess::inputFrame(const Frame::Ptr &frame) {
|
||||
_last_frame_time.resetTime();
|
||||
return _muxer->inputFrame(frame);
|
||||
}
|
||||
if (_cached_func.size() > kMaxCachedFrame) {
|
||||
WarnL << "cached frame of track(" << frame->getCodecName() << ") is too much, now dropped, please check your on_publish hook url in config.ini file";
|
||||
if (_cache_ticker.elapsedTime() > kMaxCachedFrameMS) {
|
||||
WarnL << "Cached frame of stream(" << _media_info.stream << ") is too much, your on_publish hook responded too late!";
|
||||
return false;
|
||||
}
|
||||
auto frame_cached = Frame::getCacheAbleFrame(frame);
|
||||
|
@ -25,7 +25,7 @@ public:
|
||||
using Ptr = std::shared_ptr<RtpProcess>;
|
||||
using onDetachCB = std::function<void(const toolkit::SockException &ex)>;
|
||||
|
||||
static Ptr createProcess(std::string stream_id);
|
||||
static Ptr createProcess(const MediaTuple &tuple);
|
||||
~RtpProcess();
|
||||
enum OnlyTrack { kAll = 0, kOnlyAudio = 1, kOnlyVideo = 2 };
|
||||
|
||||
@ -91,7 +91,7 @@ protected:
|
||||
bool close(mediakit::MediaSource &sender) override;
|
||||
|
||||
private:
|
||||
RtpProcess(std::string stream_id);
|
||||
RtpProcess(const MediaTuple &tuple);
|
||||
|
||||
void emitOnPublish();
|
||||
void doCachedFunc();
|
||||
@ -117,6 +117,7 @@ private:
|
||||
toolkit::Timer::Ptr _timer;
|
||||
toolkit::Ticker _last_check_alive;
|
||||
std::recursive_mutex _func_mtx;
|
||||
toolkit::Ticker _cache_ticker;
|
||||
std::deque<std::function<void()> > _cached_func;
|
||||
};
|
||||
|
||||
|
@ -30,18 +30,18 @@ class RtcpHelper: public std::enable_shared_from_this<RtcpHelper> {
|
||||
public:
|
||||
using Ptr = std::shared_ptr<RtcpHelper>;
|
||||
|
||||
RtcpHelper(Socket::Ptr rtcp_sock, std::string stream_id) {
|
||||
RtcpHelper(Socket::Ptr rtcp_sock, MediaTuple tuple) {
|
||||
_rtcp_sock = std::move(rtcp_sock);
|
||||
_stream_id = std::move(stream_id);
|
||||
_tuple = std::move(tuple);
|
||||
}
|
||||
|
||||
void setRtpServerInfo(uint16_t local_port, RtpServer::TcpMode mode, bool re_use_port, uint32_t ssrc, int only_track) {
|
||||
_ssrc = ssrc;
|
||||
_process = RtpProcess::createProcess(_stream_id);
|
||||
_process = RtpProcess::createProcess(_tuple);
|
||||
_process->setOnlyTrack((RtpProcess::OnlyTrack)only_track);
|
||||
|
||||
_timeout_cb = [=]() mutable {
|
||||
NOTICE_EMIT(BroadcastRtpServerTimeoutArgs, Broadcast::kBroadcastRtpServerTimeout, local_port, _stream_id, (int)mode, re_use_port, ssrc);
|
||||
NOTICE_EMIT(BroadcastRtpServerTimeoutArgs, Broadcast::kBroadcastRtpServerTimeout, local_port, _tuple, (int)mode, re_use_port, ssrc);
|
||||
};
|
||||
|
||||
weak_ptr<RtcpHelper> weak_self = shared_from_this();
|
||||
@ -117,15 +117,16 @@ private:
|
||||
Ticker _ticker;
|
||||
Socket::Ptr _rtcp_sock;
|
||||
RtpProcess::Ptr _process;
|
||||
std::string _stream_id;
|
||||
MediaTuple _tuple;
|
||||
RtpProcess::onDetachCB _on_detach;
|
||||
std::shared_ptr<struct sockaddr_storage> _rtcp_addr;
|
||||
};
|
||||
|
||||
void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_mode, const char *local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
|
||||
void RtpServer::start(uint16_t local_port, const MediaTuple &tuple, TcpMode tcp_mode, const char *local_ip, bool re_use_port, uint32_t ssrc, int only_track, bool multiplex) {
|
||||
//创建udp服务器
|
||||
Socket::Ptr rtp_socket = Socket::createSocket(nullptr, true);
|
||||
Socket::Ptr rtcp_socket = Socket::createSocket(nullptr, true);
|
||||
auto poller = EventPollerPool::Instance().getPoller();
|
||||
Socket::Ptr rtp_socket = Socket::createSocket(poller, true);
|
||||
Socket::Ptr rtcp_socket = Socket::createSocket(poller, true);
|
||||
if (local_port == 0) {
|
||||
//随机端口,rtp端口采用偶数
|
||||
auto pair = std::make_pair(rtp_socket, rtcp_socket);
|
||||
@ -147,9 +148,9 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
|
||||
UdpServer::Ptr udp_server;
|
||||
RtcpHelper::Ptr helper;
|
||||
//增加了多路复用判断,如果多路复用为true,就走else逻辑,同时保留了原来stream_id为空走else逻辑
|
||||
if (!stream_id.empty() && !multiplex) {
|
||||
if (!tuple.stream.empty() && !multiplex) {
|
||||
//指定了流id,那么一个端口一个流(不管是否包含多个ssrc的多个流,绑定rtp源后,会筛选掉ip端口不匹配的流)
|
||||
helper = std::make_shared<RtcpHelper>(std::move(rtcp_socket), stream_id);
|
||||
helper = std::make_shared<RtcpHelper>(std::move(rtcp_socket), tuple);
|
||||
helper->startRtcp();
|
||||
helper->setRtpServerInfo(local_port, tcp_mode, re_use_port, ssrc, only_track);
|
||||
bool bind_peer_addr = false;
|
||||
@ -175,30 +176,34 @@ void RtpServer::start(uint16_t local_port, const string &stream_id, TcpMode tcp_
|
||||
udp_server = std::make_shared<UdpServer>();
|
||||
(*udp_server)[RtpSession::kOnlyTrack] = only_track;
|
||||
(*udp_server)[RtpSession::kUdpRecvBuffer] = udpRecvSocketBuffer;
|
||||
(*udp_server)[RtpSession::kVhost] = tuple.vhost;
|
||||
(*udp_server)[RtpSession::kApp] = tuple.app;
|
||||
udp_server->start<RtpSession>(local_port, local_ip);
|
||||
rtp_socket = nullptr;
|
||||
}
|
||||
|
||||
TcpServer::Ptr tcp_server;
|
||||
if (tcp_mode == PASSIVE || tcp_mode == ACTIVE) {
|
||||
//创建tcp服务器
|
||||
tcp_server = std::make_shared<TcpServer>();
|
||||
(*tcp_server)[RtpSession::kStreamID] = stream_id;
|
||||
auto processor = helper ? helper->getProcess() : nullptr;
|
||||
// If the same processor object is shared, run the tcp server in single-threaded mode to keep it thread safe
|
||||
tcp_server = std::make_shared<TcpServer>(processor ? poller : nullptr);
|
||||
(*tcp_server)[RtpSession::kVhost] = tuple.vhost;
|
||||
(*tcp_server)[RtpSession::kApp] = tuple.app;
|
||||
(*tcp_server)[RtpSession::kStreamID] = tuple.stream;
|
||||
(*tcp_server)[RtpSession::kSSRC] = ssrc;
|
||||
(*tcp_server)[RtpSession::kOnlyTrack] = only_track;
|
||||
if (tcp_mode == PASSIVE) {
|
||||
weak_ptr<RtpServer> weak_self = shared_from_this();
|
||||
auto processor = helper ? helper->getProcess() : nullptr;
|
||||
tcp_server->start<RtpSession>(local_port, local_ip, 1024, [weak_self, processor](std::shared_ptr<RtpSession> &session) {
|
||||
session->setRtpProcess(processor);
|
||||
});
|
||||
} else if (stream_id.empty()) {
|
||||
} else if (tuple.stream.empty()) {
|
||||
// tcp主动模式时只能一个端口一个流,必须指定流id; 创建TcpServer对象也仅用于传参
|
||||
throw std::runtime_error(StrPrinter << "tcp主动模式时必需指定流id");
|
||||
}
|
||||
}
|
||||
|
||||
_on_cleanup = [rtp_socket, stream_id]() {
|
||||
_on_cleanup = [rtp_socket]() {
|
||||
if (rtp_socket) {
|
||||
//去除循环引用
|
||||
rtp_socket->setOnRead(nullptr);
|
||||
|
@ -43,7 +43,7 @@ public:
|
||||
* @param ssrc 指定的ssrc
|
||||
* @param multiplex 多路复用
|
||||
*/
|
||||
void start(uint16_t local_port, const std::string &stream_id = "", TcpMode tcp_mode = PASSIVE,
|
||||
void start(uint16_t local_port, const MediaTuple &tuple = MediaTuple{DEFAULT_VHOST, kRtpAppName, "", ""}, TcpMode tcp_mode = PASSIVE,
|
||||
const char *local_ip = "::", bool re_use_port = true, uint32_t ssrc = 0, int only_track = 0, bool multiplex = false);
|
||||
|
||||
/**
|
||||
|
@ -21,6 +21,8 @@ using namespace toolkit;
|
||||
|
||||
namespace mediakit{
|
||||
|
||||
const string RtpSession::kVhost = "vhost";
|
||||
const string RtpSession::kApp = "app";
|
||||
const string RtpSession::kStreamID = "stream_id";
|
||||
const string RtpSession::kSSRC = "ssrc";
|
||||
const string RtpSession::kOnlyTrack = "only_track";
|
||||
@ -31,7 +33,9 @@ void RtpSession::attachServer(const Server &server) {
|
||||
}
|
||||
|
||||
void RtpSession::setParams(mINI &ini) {
|
||||
_stream_id = ini[kStreamID];
|
||||
_tuple.vhost = ini[kVhost];
|
||||
_tuple.app = ini[kApp];
|
||||
_tuple.stream = ini[kStreamID];
|
||||
_ssrc = ini[kSSRC];
|
||||
_only_track = ini[kOnlyTrack];
|
||||
int udp_socket_buffer = ini[kUdpRecvBuffer];
|
||||
@ -63,7 +67,7 @@ void RtpSession::onError(const SockException &err) {
|
||||
if (_emit_detach) {
|
||||
_process->onDetach(err);
|
||||
}
|
||||
WarnP(this) << _stream_id << " " << err;
|
||||
WarnP(this) << _tuple.shortUrl() << " " << err;
|
||||
}
|
||||
|
||||
void RtpSession::onManager() {
|
||||
@ -107,12 +111,12 @@ void RtpSession::onRtpPacket(const char *data, size_t len) {
|
||||
}
|
||||
|
||||
// 未指定流id就使用ssrc为流id
|
||||
if (_stream_id.empty()) {
|
||||
_stream_id = printSSRC(_ssrc);
|
||||
if (_tuple.stream.empty()) {
|
||||
_tuple.stream = printSSRC(_ssrc);
|
||||
}
|
||||
|
||||
if (!_process) {
|
||||
_process = RtpProcess::createProcess(_stream_id);
|
||||
_process = RtpProcess::createProcess(_tuple);
|
||||
_process->setOnlyTrack((RtpProcess::OnlyTrack)_only_track);
|
||||
weak_ptr<RtpSession> weak_self = static_pointer_cast<RtpSession>(shared_from_this());
|
||||
_process->setOnDetach([weak_self](const SockException &ex) {
|
||||
|
@ -22,6 +22,8 @@ namespace mediakit{
|
||||
|
||||
class RtpSession : public toolkit::Session, public RtpSplitter {
|
||||
public:
|
||||
static const std::string kVhost;
|
||||
static const std::string kApp;
|
||||
static const std::string kStreamID;
|
||||
static const std::string kSSRC;
|
||||
static const std::string kOnlyTrack;
|
||||
@ -54,7 +56,7 @@ private:
|
||||
int _only_track = 0;
|
||||
uint32_t _ssrc = 0;
|
||||
toolkit::Ticker _ticker;
|
||||
std::string _stream_id;
|
||||
MediaTuple _tuple;
|
||||
struct sockaddr_storage _addr;
|
||||
RtpProcess::Ptr _process;
|
||||
};
|
||||
|
@ -46,7 +46,7 @@ const char *RtpSplitter::onSearchPacketTail(const char *data, size_t len) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if ( _is_ehome ) {
|
||||
if (_check_ehome_count) {
|
||||
if (isEhome(data, len)) {
|
||||
//是ehome协议
|
||||
if (len < kEHOME_OFFSET + 4) {
|
||||
@ -59,7 +59,7 @@ const char *RtpSplitter::onSearchPacketTail(const char *data, size_t len) {
|
||||
//忽略ehome私有头
|
||||
return onSearchPacketTail_l(data + kEHOME_OFFSET + 2, len - kEHOME_OFFSET - 2);
|
||||
}
|
||||
_is_ehome = false;
|
||||
_check_ehome_count--;
|
||||
}
|
||||
|
||||
if ( _is_rtsp_interleaved ) {
|
||||
|
@@ -31,7 +31,8 @@ protected:
    const char *onSearchPacketTail_l(const char *data, size_t len);

private:
    bool _is_ehome = true;
    bool _is_ehome = false;
    int _check_ehome_count = 3;
    bool _is_rtsp_interleaved = true;
    size_t _offset = 0;
};

@@ -99,10 +99,10 @@ RtpMultiCaster::~RtpMultiCaster() {
    DebugL;
}

RtpMultiCaster::RtpMultiCaster(SocketHelper &helper, const string &local_ip, const string &vhost, const string &app, const string &stream, uint32_t multicast_ip, uint16_t video_port, uint16_t audio_port) {
    auto src = dynamic_pointer_cast<RtspMediaSource>(MediaSource::find(RTSP_SCHEMA, vhost, app, stream));
RtpMultiCaster::RtpMultiCaster(SocketHelper &helper, const string &local_ip, const MediaTuple &tuple, uint32_t multicast_ip, uint16_t video_port, uint16_t audio_port) {
    auto src = dynamic_pointer_cast<RtspMediaSource>(MediaSource::find(RTSP_SCHEMA, tuple.vhost, tuple.app, tuple.stream));
    if (!src) {
        auto err = StrPrinter << "未找到媒体源:" << vhost << " " << app << " " << stream << endl;
        auto err = StrPrinter << "未找到媒体源:" << tuple.shortUrl() << endl;
        throw std::runtime_error(err);
    }
    _multicast_ip = (multicast_ip) ? make_shared<uint32_t>(multicast_ip) : MultiCastAddressMaker::Instance().obtain();
@@ -144,7 +144,7 @@ RtpMultiCaster::RtpMultiCaster(SocketHelper &helper, const string &local_ip, con
        });
    });

    string strKey = StrPrinter << local_ip << " " << vhost << " " << app << " " << stream << endl;
    string strKey = StrPrinter << local_ip << " " << tuple.vhost << " " << tuple.app << " " << tuple.stream << endl;
    _rtp_reader->setDetachCB([this, strKey]() {
        {
            lock_guard<recursive_mutex> lck(g_mtx);
@@ -167,7 +167,7 @@ RtpMultiCaster::RtpMultiCaster(SocketHelper &helper, const string &local_ip, con
    DebugL << MultiCastAddressMaker::toString(*_multicast_ip) << " "
           << _udp_sock[0]->get_local_port() << " "
           << _udp_sock[1]->get_local_port() << " "
           << vhost << " " << app << " " << stream;
           << tuple.shortUrl();
}

uint16_t RtpMultiCaster::getMultiCasterPort(TrackType trackType) {
@@ -180,17 +180,17 @@ string RtpMultiCaster::getMultiCasterIP() {
    return SockUtil::inet_ntoa(addr);
}

RtpMultiCaster::Ptr RtpMultiCaster::get(SocketHelper &helper, const string &local_ip, const string &vhost, const string &app, const string &stream, uint32_t multicast_ip, uint16_t video_port, uint16_t audio_port) {
    static auto on_create = [](SocketHelper &helper, const string &local_ip, const string &vhost, const string &app, const string &stream, uint32_t multicast_ip, uint16_t video_port, uint16_t audio_port){
RtpMultiCaster::Ptr RtpMultiCaster::get(SocketHelper &helper, const string &local_ip, const MediaTuple &tuple, uint32_t multicast_ip, uint16_t video_port, uint16_t audio_port) {
    static auto on_create = [](SocketHelper &helper, const string &local_ip, const MediaTuple &tuple, uint32_t multicast_ip, uint16_t video_port, uint16_t audio_port){
        try {
            auto poller = helper.getPoller();
            auto ret = RtpMultiCaster::Ptr(new RtpMultiCaster(helper, local_ip, vhost, app, stream, multicast_ip, video_port, audio_port), [poller](RtpMultiCaster *ptr) {
            auto ret = RtpMultiCaster::Ptr(new RtpMultiCaster(helper, local_ip, tuple, multicast_ip, video_port, audio_port), [poller](RtpMultiCaster *ptr) {
                poller->async([ptr]() {
                    delete ptr;
                });
            });
            lock_guard<recursive_mutex> lck(g_mtx);
            string strKey = StrPrinter << local_ip << " " << vhost << " " << app << " " << stream << endl;
            string strKey = StrPrinter << local_ip << " " << tuple.vhost << " " << tuple.app << " " << tuple.stream << endl;
            g_multi_caster_map.emplace(strKey, ret);
            return ret;
        } catch (std::exception &ex) {
@@ -199,16 +199,16 @@ RtpMultiCaster::Ptr RtpMultiCaster::get(SocketHelper &helper, const string &loca
        }
    };

    string strKey = StrPrinter << local_ip << " " << vhost << " " << app << " " << stream << endl;
    string strKey = StrPrinter << local_ip << " " << tuple.vhost << " " << tuple.app << " " << tuple.stream << endl;
    lock_guard<recursive_mutex> lck(g_mtx);
    auto it = g_multi_caster_map.find(strKey);
    if (it == g_multi_caster_map.end()) {
        return on_create(helper, local_ip, vhost, app, stream, multicast_ip, video_port, audio_port);
        return on_create(helper, local_ip, tuple, multicast_ip, video_port, audio_port);
    }
    auto ret = it->second.lock();
    if (!ret) {
        g_multi_caster_map.erase(it);
        return on_create(helper, local_ip, vhost, app, stream, multicast_ip, video_port, audio_port);
        return on_create(helper, local_ip, tuple, multicast_ip, video_port, audio_port);
    }
    return ret;
}

@@ -45,14 +45,14 @@ public:

    ~RtpMultiCaster();

    static Ptr get(toolkit::SocketHelper &helper, const std::string &local_ip, const std::string &vhost, const std::string &app, const std::string &stream, uint32_t multicast_ip = 0, uint16_t video_port = 0, uint16_t audio_port = 0);
    static Ptr get(toolkit::SocketHelper &helper, const std::string &local_ip, const MediaTuple &tuple, uint32_t multicast_ip = 0, uint16_t video_port = 0, uint16_t audio_port = 0);
    void setDetachCB(void *listener,const onDetach &cb);

    std::string getMultiCasterIP();
    uint16_t getMultiCasterPort(TrackType trackType);

private:
    RtpMultiCaster(toolkit::SocketHelper &helper, const std::string &local_ip, const std::string &vhost, const std::string &app, const std::string &stream, uint32_t multicast_ip, uint16_t video_port, uint16_t audio_port);
    RtpMultiCaster(toolkit::SocketHelper &helper, const std::string &local_ip, const MediaTuple &tuple, uint32_t multicast_ip, uint16_t video_port, uint16_t audio_port);

private:
    std::recursive_mutex _mtx;

@@ -15,6 +15,7 @@
#include "Common/Parser.h"
#include "Common/config.h"
#include "Network/Socket.h"
#include "Extension/Factory.h"

using namespace std;
using namespace toolkit;
@@ -236,10 +237,6 @@ void SdpParser::load(const string &sdp) {
            track._codec = codec;
            track._samplerate = samplerate;
        }
        if (!track._samplerate && track._type == TrackVideo) {
            // if no video sample rate is set, default to 90000
            track._samplerate = 90000;
        }
        ++it;
    }

@@ -260,6 +257,17 @@ void SdpParser::load(const string &sdp) {
        if (it != track._attr.end()) {
            track._control = it->second;
        }

        if (!track._samplerate && track._type == TrackVideo) {
            // if no video sample rate is set, default to 90000
            track._samplerate = 90000;
        } else if (!track._samplerate && track._type == TrackAudio) {
            // some rtsp sdp carry no sample rate but have an fmtp config the parser can derive it from
            auto t = Factory::getTrackBySdp(track_ptr);
            if (t) {
                track._samplerate = std::static_pointer_cast<AudioTrack>(t)->getAudioSampleRate();
            }
        }
    }
}

@@ -44,7 +44,13 @@ public:
     */
    RtspMediaSource(const MediaTuple& tuple, int ring_size = RTP_GOP_SIZE): MediaSource(RTSP_SCHEMA, tuple), _ring_size(ring_size) {}

    ~RtspMediaSource() override { flush(); }
    ~RtspMediaSource() override {
        try {
            flush();
        } catch (std::exception &ex) {
            WarnL << ex.what();
        }
    }

    /**
     * get the media source's ring buffer

@@ -29,7 +29,13 @@ public:
        getRtpRing()->setDelegate(_media_src);
    }

    ~RtspMediaSourceMuxer() override { RtspMuxer::flush(); }
    ~RtspMediaSourceMuxer() override {
        try {
            RtspMuxer::flush();
        } catch (std::exception &ex) {
            WarnL << ex.what();
        }
    }

    void setListener(const std::weak_ptr<MediaSourceEvent> &listener){
        setDelegate(listener);

@@ -88,6 +88,7 @@ void RtspPlayer::play(const string &strUrl) {
    _rtp_type = (Rtsp::eRtpType)(int)(*this)[Client::kRtpType];
    _beat_type = (*this)[Client::kRtspBeatType].as<int>();
    _beat_interval_ms = (*this)[Client::kBeatIntervalMS].as<int>();
    _speed = (*this)[Client::kRtspSpeed].as<float>();
    DebugL << url._url << " " << (url._user.size() ? url._user : "null") << " " << (url._passwd.size() ? url._passwd : "null") << " " << _rtp_type;

    weak_ptr<RtspPlayer> weakSelf = static_pointer_cast<RtspPlayer>(shared_from_this());
@@ -256,17 +257,19 @@ void RtspPlayer::sendSetup(unsigned int track_idx) {
    switch (_rtp_type) {
        case Rtsp::RTP_TCP: {
            sendRtspRequest(
                "SETUP", control_url, { "Transport", StrPrinter << "RTP/AVP/TCP;unicast;interleaved=" << track->_type * 2 << "-" << track->_type * 2 + 1 });
                "SETUP", control_url,
                { "Transport", StrPrinter << "RTP/AVP/TCP;unicast;interleaved=" << track->_type * 2 << "-" << track->_type * 2 + 1 << ";mode=play" });
        } break;
        case Rtsp::RTP_MULTICAST: {
            sendRtspRequest("SETUP", control_url, { "Transport", "RTP/AVP;multicast" });
            sendRtspRequest("SETUP", control_url, { "Transport", "RTP/AVP;multicast;mode=play" });
        } break;
        case Rtsp::RTP_UDP: {
            createUdpSockIfNecessary(track_idx);
            sendRtspRequest(
                "SETUP", control_url,
                { "Transport",
                  StrPrinter << "RTP/AVP;unicast;client_port=" << _rtp_sock[track_idx]->get_local_port() << "-" << _rtcp_sock[track_idx]->get_local_port() });
                  StrPrinter << "RTP/AVP;unicast;client_port=" << _rtp_sock[track_idx]->get_local_port() << "-" << _rtcp_sock[track_idx]->get_local_port()
                             << ";mode=play" });
        } break;
        default: break;
    }
@@ -387,7 +390,12 @@ void RtspPlayer::handleResSETUP(const Parser &parser, unsigned int track_idx) {
    }
    // all SETUP requests have been sent
    // send the PLAY request
    if (_speed==0.0f) {
        sendPause(type_play, 0);
    } else {
        sendPause(type_speed, 0);
    }

}

void RtspPlayer::sendDescribe() {
@@ -436,6 +444,9 @@ void RtspPlayer::sendPause(int type, uint32_t seekMS) {
        case type_seek:
            sendRtspRequest("PLAY", _control_url, { "Range", StrPrinter << "npt=" << setiosflags(ios::fixed) << setprecision(2) << seekMS / 1000.0 << "-" });
            break;
        case type_speed:
            speed(_speed);
            break;
        default:
            WarnL << "unknown type : " << type;
            _on_response = nullptr;

@@ -120,6 +120,8 @@ private:
    uint32_t _beat_interval_ms = 0;

    std::string _play_url;
    // initial rtsp playback speed
    float _speed= 0.0f;
    std::vector<SdpTrack::Ptr> _sdp_track;
    std::function<void(const Parser&)> _on_response;
    // RTP ports, indexed by track id

@@ -796,7 +796,7 @@ void RtspSession::handleReq_Setup(const Parser &parser) {
        break;
        case Rtsp::RTP_MULTICAST: {
            if(!_multicaster){
                _multicaster = RtpMultiCaster::get(*this, get_local_ip(), _media_info.vhost, _media_info.app, _media_info.stream, _multicast_ip, _multicast_video_port, _multicast_audio_port);
                _multicaster = RtpMultiCaster::get(*this, get_local_ip(), _media_info, _multicast_ip, _multicast_video_port, _multicast_audio_port);
                if (!_multicaster) {
                    send_NotAcceptable();
                    throw SockException(Err_shutdown, "can not get a available udp multicast socket");

@@ -40,7 +40,13 @@ public:

    TSMediaSource(const MediaTuple& tuple, int ring_size = TS_GOP_SIZE): MediaSource(TS_SCHEMA, tuple), _ring_size(ring_size) {}

    ~TSMediaSource() override { flush(); }
    ~TSMediaSource() override {
        try {
            flush();
        } catch (std::exception &ex) {
            WarnL << ex.what();
        }
    }

    /**
     * get the media source's ring buffer

@@ -224,7 +224,8 @@ int main(int argc, char *argv[]) {
        option.enable_mp4 = false;
        option.modify_stamp = (int)ProtocolOption::kModifyStampRelative;
        // add the pull-stream proxy
        auto proxy = std::make_shared<PlayerProxy>(DEFAULT_VHOST, "app", std::to_string(i), option, -1, nullptr, 1);
        auto tuple = MediaTuple { DEFAULT_VHOST, "app", std::to_string(i), "" };
        auto proxy = std::make_shared<PlayerProxy>(tuple, option, -1, nullptr, 1);
        // start the pull-stream proxy
        proxy->play(input_urls[i]);
        proxy_map.emplace(i, std::move(proxy));

@@ -137,7 +137,8 @@ int main(int argc, char *argv[]) {
    option.enable_mp4 = false;
    for (auto i = 0; i < proxy_count; ++i) {
        auto stream = to_string(i);
        PlayerProxy::Ptr player(new PlayerProxy(DEFAULT_VHOST, "live", stream, option));
        auto tuple = MediaTuple{DEFAULT_VHOST, "live", stream, ""};
        PlayerProxy::Ptr player(new PlayerProxy(tuple, option));
        (*player)[Client::kRtpType] = rtp_type;
        player->play(in_url);
        proxyMap.emplace(stream, player);

@@ -148,9 +148,10 @@ int main(int argc, char *argv[]) {
    MediaSource::Ptr src = nullptr;
    PlayerProxy::Ptr proxy = nullptr;;

    auto tuple = MediaTuple { DEFAULT_VHOST, app, stream, "" };
    if (end_with(in_url, ".mp4")) {
        // create MediaSource from mp4 file
        auto reader = std::make_shared<MP4Reader>(DEFAULT_VHOST, app, stream, in_url);
        auto reader = std::make_shared<MP4Reader>(tuple, in_url);
        // mp4 repeat
        reader->startReadMP4(0, true, true);
        src = MediaSource::find(schema, DEFAULT_VHOST, app, stream, false);
@@ -161,7 +162,7 @@ int main(int argc, char *argv[]) {
        }
    } else {
        // add the pull-stream proxy
        proxy = std::make_shared<PlayerProxy>(DEFAULT_VHOST, app, stream, option);
        proxy = std::make_shared<PlayerProxy>(tuple, option);
        // rtsp pull mode
        (*proxy)[Client::kRtpType] = rtp_type;
        // start the pull-stream proxy

@@ -79,7 +79,8 @@ int domain(const string &playUrl, const string &pushUrl) {
    ProtocolOption option;
    option.enable_hls = false;
    option.enable_mp4 = false;
    PlayerProxy::Ptr player(new PlayerProxy(DEFAULT_VHOST, "app", "stream", option, -1, poller));
    auto tuple = MediaTuple{DEFAULT_VHOST, "app", "stream", ""};
    PlayerProxy::Ptr player(new PlayerProxy(tuple, option, -1, poller));
    // the rtsp pull transport can be specified; tcp and udp are supported, tcp is the default
    // (*player)[Client::kRtpType] = Rtsp::RTP_UDP;
    player->play(playUrl.data());

@@ -43,7 +43,8 @@ int domain(const string &file, const string &url) {
    mINI::Instance()["protocol.enable_" + schema] = 1;

    // load the mp4 file and create a MediaSource object
    auto reader = std::make_shared<MP4Reader>(DEFAULT_VHOST, "live", "stream", file);
    auto tuple = MediaTuple {DEFAULT_VHOST, "live", "stream", ""};
    auto reader = std::make_shared<MP4Reader>(tuple, file);
    // start loading the mp4; with ref_self set to false the reader can be released by setting it to nullptr, and with file_repeat unset pushing stops once the file has been read
    reader->startReadMP4(100, false, true);
    auto src = MediaSource::find(schema, DEFAULT_VHOST, "live", "stream", false);

@@ -42,7 +42,7 @@ static bool loadFile(const char *path, const EventPoller::Ptr &poller) {
    memset(&addr, 0, sizeof(addr));
    addr.ss_family = AF_INET;
    auto sock = Socket::createSocket(poller);
    auto process = RtpProcess::createProcess("test");
    auto process = RtpProcess::createProcess(MediaTuple { DEFAULT_VHOST, kRtpAppName, "test", "" });

    uint64_t stamp_last = 0;
    auto total_size = std::make_shared<size_t>(0);
@@ -75,7 +75,7 @@ static bool loadFile(const char *path, const EventPoller::Ptr &poller) {
        return 0;
    }

    auto diff = stamp - stamp_last;
    auto diff = static_cast<int64_t>(stamp - stamp_last);
    if (diff < 0 || diff > 500) {
        diff = 1;
    }

@@ -230,8 +230,8 @@ int main(int argc,char *argv[]) {
        //http://127.0.0.1/record/live/0/2017-04-11/11-09-38.mp4
        //rtsp://127.0.0.1/record/live/0/2017-04-11/11-09-38.mp4
        //rtmp://127.0.0.1/record/live/0/2017-04-11/11-09-38.mp4

        PlayerProxy::Ptr player(new PlayerProxy(DEFAULT_VHOST, "live", std::string("chn") + to_string(i).data(), ProtocolOption()));
        auto tuple = MediaTuple{DEFAULT_VHOST, "live", std::string("chn") + to_string(i).data(), ""};
        PlayerProxy::Ptr player(new PlayerProxy(tuple, ProtocolOption()));
        // use RTP over TCP (effective when playing rtsp)
        (*player)[Client::kRtpType] = Rtsp::RTP_TCP;
        // start playback; on failure or interruption it retries automatically, the retry count is set in the config file and defaults to retrying forever

@@ -19,12 +19,13 @@ namespace mediakit {
// RTC configuration items
namespace Rtc {
#define RTC_FIELD "rtc."
//~ nack receiver side
// oldest interval kept in the nack packet cache
const string kMaxNackMS = RTC_FIELD "maxNackMS";
// nack packet check interval (in packets)
const string kRtpCacheCheckInterval = RTC_FIELD "rtpCacheCheckInterval";
//~ nack sender side
//~ nack receiver side, rtp sender side
// max length of the rtp retransmission cache queue, in milliseconds
const string kMaxRtpCacheMS = RTC_FIELD "maxRtpCacheMS";
// max length of the rtp retransmission cache queue, in packets
const string kMaxRtpCacheSize = RTC_FIELD "maxRtpCacheSize";

//~ nack sender side, rtp receiver side
// max number of rtp loss states kept
const string kNackMaxSize = RTC_FIELD "nackMaxSize";
// max time an rtp loss state is kept
@@ -37,8 +38,8 @@ const string kNackIntervalRatio = RTC_FIELD "nackIntervalRatio";
const string kNackRtpSize = RTC_FIELD "nackRtpSize";

static onceToken token([]() {
    mINI::Instance()[kMaxNackMS] = 5 * 1000;
    mINI::Instance()[kRtpCacheCheckInterval] = 100;
    mINI::Instance()[kMaxRtpCacheMS] = 5 * 1000;
    mINI::Instance()[kMaxRtpCacheSize] = 2048;
    mINI::Instance()[kNackMaxSize] = 2048;
    mINI::Instance()[kNackMaxMS] = 3 * 1000;
    mINI::Instance()[kNackMaxCount] = 15;
@@ -49,17 +50,26 @@ static onceToken token([]() {
} // namespace Rtc

void NackList::pushBack(RtpPacket::Ptr rtp) {
    GET_CONFIG(uint32_t, max_rtp_cache_ms, Rtc::kMaxRtpCacheMS);
    GET_CONFIG(uint32_t, max_rtp_cache_size, Rtc::kMaxRtpCacheSize);

    // record the rtp packet
    auto seq = rtp->getSeq();
    _nack_cache_seq.emplace_back(seq);
    _nack_cache_pkt.emplace(seq, std::move(rtp));
    GET_CONFIG(uint32_t, rtpcache_checkinterval, Rtc::kRtpCacheCheckInterval);
    if (++_cache_ms_check < rtpcache_checkinterval) {

    // cap the rtp cache size (packet count)
    if (_nack_cache_seq.size() > max_rtp_cache_size) {
        popFront();
    }

    if (++_cache_ms_check < 100) {
        // check the cache length every 100 rtp packets to save cpu
        return;
    }
    _cache_ms_check = 0;
    GET_CONFIG(uint32_t, maxnackms, Rtc::kMaxNackMS);
    while (getCacheMS() >= maxnackms) {
        // part of the nack cache needs to be evicted
    // cap the rtp cache duration
    while (getCacheMS() >= max_rtp_cache_ms) {
        popFront();
    }
}
@@ -96,13 +106,13 @@ RtpPacket::Ptr *NackList::getRtp(uint16_t seq) {

uint32_t NackList::getCacheMS() {
    while (_nack_cache_seq.size() > 2) {
        auto back_stamp = getRtpStamp(_nack_cache_seq.back());
        auto back_stamp = getNtpStamp(_nack_cache_seq.back());
        if (back_stamp == -1) {
            _nack_cache_seq.pop_back();
            continue;
        }

        auto front_stamp = getRtpStamp(_nack_cache_seq.front());
        auto front_stamp = getNtpStamp(_nack_cache_seq.front());
        if (front_stamp == -1) {
            _nack_cache_seq.pop_front();
            continue;
@@ -111,18 +121,19 @@ uint32_t NackList::getCacheMS() {
        if (back_stamp >= front_stamp) {
            return back_stamp - front_stamp;
        }
        // the timestamp has most likely wrapped around
        return back_stamp + (UINT32_MAX - front_stamp);
        // the ntp timestamp went backwards; invalid data, drop it
        _nack_cache_seq.pop_front();
    }
    return 0;
}

int64_t NackList::getRtpStamp(uint16_t seq) {
int64_t NackList::getNtpStamp(uint16_t seq) {
    auto it = _nack_cache_pkt.find(seq);
    if (it == _nack_cache_pkt.end()) {
        return -1;
    }
    return it->second->getStampMS(false);
    // use the ntp timestamp, which never goes backwards
    return it->second->getStampMS(true);
}

////////////////////////////////////////////////////////////////////////////////////////////////

@@ -20,6 +20,15 @@

namespace mediakit {

// RTC configuration items
namespace Rtc {
//~ nack sender side, rtp receiver side
// max number of rtp loss states kept
extern const std::string kNackMaxSize;
// max time an rtp loss state is kept
extern const std::string kNackMaxMS;
} // namespace Rtc

class NackList {
public:
    void pushBack(RtpPacket::Ptr rtp);
@@ -28,7 +37,7 @@ public:
private:
    void popFront();
    uint32_t getCacheMS();
    int64_t getRtpStamp(uint16_t seq);
    int64_t getNtpStamp(uint16_t seq);
    RtpPacket::Ptr *getRtp(uint16_t seq);

private:
@@ -66,7 +75,7 @@ private:
    struct NackStatus {
        uint64_t first_stamp;
        uint64_t update_stamp;
        int nack_count = 0;
        uint32_t nack_count = 0;
    };
    std::map<uint16_t /*seq*/, NackStatus> _nack_send_status;
};

@@ -9,7 +9,10 @@
 */

#include "WebRtcPlayer.h"

#include "Common/config.h"
#include "Extension/Factory.h"
#include "Util/base64.h"

using namespace std;

@@ -32,6 +35,9 @@ WebRtcPlayer::WebRtcPlayer(const EventPoller::Ptr &poller,
    _media_info = info;
    _play_src = src;
    CHECK(src);

    GET_CONFIG(bool, direct_proxy, Rtsp::kDirectProxy);
    _send_config_frames_once = direct_proxy;
}

void WebRtcPlayer::onStartWebRTC() {
@@ -56,6 +62,13 @@ void WebRtcPlayer::onStartWebRTC() {
        if (!strong_self) {
            return;
        }

        if (strong_self->_send_config_frames_once && !pkt->empty()) {
            const auto &first_rtp = pkt->front();
            strong_self->sendConfigFrames(first_rtp->getSeq(), first_rtp->sample_rate, first_rtp->getStamp(), first_rtp->ntp_stamp);
            strong_self->_send_config_frames_once = false;
        }

        size_t i = 0;
        pkt->for_each([&](const RtpPacket::Ptr &rtp) {
            //TraceL<<"send track type:"<<rtp->type<<" ts:"<<rtp->getStamp()<<" ntp:"<<rtp->ntp_stamp<<" size:"<<rtp->getPayloadSize()<<" i:"<<i;
@@ -111,4 +124,41 @@ void WebRtcPlayer::onRtcConfigure(RtcConfigure &configure) const {
    configure.setPlayRtspInfo(playSrc->getSdp());
}

void WebRtcPlayer::sendConfigFrames(uint32_t before_seq, uint32_t sample_rate, uint32_t timestamp, uint64_t ntp_timestamp) {
    auto play_src = _play_src.lock();
    if (!play_src) {
        return;
    }
    SdpParser parser(play_src->getSdp());
    auto video_sdp = parser.getTrack(TrackVideo);
    if (!video_sdp) {
        return;
    }
    auto video_track = dynamic_pointer_cast<VideoTrack>(Factory::getTrackBySdp(video_sdp));
    if (!video_track) {
        return;
    }
    auto frames = video_track->getConfigFrames();
    if (frames.empty()) {
        return;
    }
    auto encoder = mediakit::Factory::getRtpEncoderByCodecId(video_track->getCodecId(), 0);
    if (!encoder) {
        return;
    }

    GET_CONFIG(uint32_t, video_mtu, Rtp::kVideoMtuSize);
    encoder->setRtpInfo(0, video_mtu, sample_rate, 0, 0, 0);

    auto seq = before_seq - frames.size();
    for (const auto &frame : frames) {
        auto rtp = encoder->getRtpInfo().makeRtp(TrackVideo, frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize(), false, 0);
        auto header = rtp->getHeader();
        header->seq = htons(seq++);
        header->stamp = htonl(timestamp);
        rtp->ntp_stamp = ntp_timestamp;
        onSendRtp(rtp, false);
    }
}

}// namespace mediakit

@@ -31,11 +31,17 @@ protected:
private:
    WebRtcPlayer(const EventPoller::Ptr &poller, const RtspMediaSource::Ptr &src, const MediaInfo &info);

    void sendConfigFrames(uint32_t before_seq, uint32_t sample_rate, uint32_t timestamp, uint64_t ntp_timestamp);

private:
    // media-related metadata
    MediaInfo _media_info;
    // the rtsp source being played
    std::weak_ptr<RtspMediaSource> _play_src;

    // with direct rtp forwarding the sps/pps are usually missing; send the config frames once before forwarding rtp, which makes playback possible in some cases
    bool _send_config_frames_once { false };

    // reader object for the rtsp source being played
    RtspMediaSource::RingType::RingReader::Ptr _reader;
};

@@ -13,6 +13,7 @@
#include "Util/base64.h"
#include "Network/sockutil.h"
#include "Common/config.h"
#include "Nack.h"
#include "RtpExt.h"
#include "Rtcp/Rtcp.h"
#include "Rtcp/RtcpFCI.h"
@@ -57,9 +58,6 @@ const string kMinBitrate = RTC_FIELD "min_bitrate";
// data channel settings
const string kDataChannelEcho = RTC_FIELD "datachannel_echo";

// max time an rtp loss state is kept
const string kNackMaxMS = RTC_FIELD "nackMaxMS";

static onceToken token([]() {
    mINI::Instance()[kTimeOutSec] = 15;
    mINI::Instance()[kExternIP] = "";
@@ -72,8 +70,6 @@ static onceToken token([]() {
    mINI::Instance()[kMinBitrate] = 0;

    mINI::Instance()[kDataChannelEcho] = true;

    mINI::Instance()[kNackMaxMS] = 3 * 1000;
});

} // namespace RTC
@@ -806,7 +802,8 @@ public:
        setOnSorted(std::move(cb));
        // configure jitter buffer parameters
        GET_CONFIG(uint32_t, nack_maxms, Rtc::kNackMaxMS);
        RtpTrackImp::setParams(1024, nack_maxms, 512);
        GET_CONFIG(uint32_t, nack_max_rtp, Rtc::kNackMaxSize);
        RtpTrackImp::setParams(nack_max_rtp, nack_maxms, nack_max_rtp / 2);
        _nack_ctx.setOnNack([this](const FCI_NACK &nack) { onNack(nack); });
    }

1249 www/ZLMRTCClient.js
File diff suppressed because it is too large

@@ -3,17 +3,30 @@
<head>
    <title>ZLM RTC demo</title>
    <script src="./ZLMRTCClient.js"></script>
    <script src="https://unpkg.com/vconsole@latest/dist/vconsole.min.js"></script>
    <script>
        // VConsole will be exported to `window.VConsole` by default.
        var vConsole = new window.VConsole();
    </script>
    <style>
        video {
            width: 40vw;
            max-height: 50vh;
            height: 22.5vw; /* default width:height ratio is 16:9 */
            object-fit: contain;
            background-color: grey;
        }
    </style>
</head>

<body>

<div style="text-align: center;">
    <div>
        <video id='video' controls autoplay style="text-align:left;">
        <video id='video' controls autoplay>
            Your browser is too old which doesn't support HTML5 video.
        </video>

        <video id='selfVideo' controls autoplay style="text-align:right;">
        <video id='selfVideo' controls autoplay>
            Your browser is too old which doesn't support HTML5 video.
        </video>
    </div>
@@ -27,7 +40,6 @@
    </div>

    <div style="float: right; width: 70%">

        <p>
            <label for="streamUrl">url:</label>
            <input type="text" style="co; width:70%" id='streamUrl' value="http://192.168.1.101/index/api/webrtc?app=live&stream=xiong&type=play">
@@ -82,9 +94,24 @@
        <button onclick="send()">发送(send by datachannel)</button>
        <button onclick="close()">关闭(close datachannel)</button>

        <p>
            <label for="videoDevice">videodevice:</label>
            <select id="videoDevice">
            </select>
        </p>

        <p>
            <label for="audioDevice">audiodevice:</label>
            <select id="audioDevice">
            </select>
        </p>

        <p>
            <label for="switchDevice">switchDevice:</label>
            <input type="checkbox" id='switchDevice' checked="checked">
        </p>
        <button onclick="switchVideo()">切换视频(switch video)</button>
        <button onclick="switchAudio()">切换音频(switch audio)</button>
    </div>
</div>
@@ -132,6 +159,24 @@
    document.getElementById("resolution").add(opt,null);
});

ZLMRTCClient.GetAllMediaDevice().then(devices=>{
    devices.forEach(device=>{
        opt = document.createElement('option');
        opt.text = device.label + ":"+device.deviceId
        opt.value = JSON.stringify(device)
        if(device.kind == 'videoinput'){
            document.getElementById("videoDevice").add(opt,null)
        }else if(device.kind == 'audioinput'){
            document.getElementById("audioDevice").add(opt,null)
        }else if(device.kind == 'audiooutput'){
            // useless
            //console.error('not support device')
        }
    })
}).catch(e=>{
    console.error(e);
})

function start_play(){
    let elr = document.getElementById("resolution");
    let res = elr.options[elr.selectedIndex].text.match(/\d+/g);
@@ -157,6 +202,21 @@
    window.history.pushState(null, null, newUrl);
}

let elv = document.getElementById("videoDevice");
let ela = document.getElementById("audioDevice");

let vdevid = ''
let adevid = ''

if (!recvOnly) {
    if (elv.selectedIndex !== -1) {
        vdevid = JSON.parse(elv.options[elv.selectedIndex].value).deviceId
    }
    if (ela.selectedIndex !== -1) {
        adevid = JSON.parse(ela.options[ela.selectedIndex].value).deviceId
    }
}

player = new ZLMRTCClient.Endpoint(
    {
        element: document.getElementById('video'),// the video element
@@ -169,6 +229,8 @@
        recvOnly:recvOnly,
        resolution:{w,h},
        usedatachannel:document.getElementById('datachannel').checked,
        videoId:vdevid, // leave empty to use the default device
        audioId:adevid, // leave empty to use the default device
    }
);

@@ -344,6 +406,39 @@
    // get_media_list();
}, 5000);

function switchVideo(){
    if(player){
        // first arg: false means switch to screen capture (second arg ignored),
        // true means switch to camera, with the second arg being the camera deviceId
        let elv = document.getElementById("videoDevice");
        let vdevid = JSON.parse(elv.options[elv.selectedIndex].value).deviceId
        player.switchVideo(document.getElementById('switchDevice').checked,vdevid).then(()=>{
            // switch video successful

        }).catch(e=>{
            // switch video failed
            console.error(e);
        })
    }

}

function switchAudio(){
    if(player){
        // first arg: false means switch to screen audio (second arg ignored),
        // true means switch to mic, with the second arg being the mic deviceId
        let ela = document.getElementById("audioDevice");
        let adevid = JSON.parse(ela.options[ela.selectedIndex].value).deviceId
        player.switcAudio(document.getElementById('switchDevice').checked,adevid).then(()=>{
            // switch audio successful

        }).catch(e=>{
            // switch audio failed
            console.error(e);
        })
    }
}

</script>

</body>