Merge pull request #17 from xiongziliang/master

update
This commit is contained in:
baiyfcu 2020-06-17 15:58:41 +08:00 committed by GitHub
commit a46218965a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
118 changed files with 3386 additions and 2067 deletions

@ -1 +1 @@
Subproject commit 4ede70fc435eb0a4d3a752b521170d86440b3935
Subproject commit 5030af90126ea8f01ded6744ae8abdf549d00a81

33
3rdpart/assert.h Normal file
View File

@ -0,0 +1,33 @@
/*
 * Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
 *
 * This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
 *
 * Use of this source code is governed by MIT license that can be found in the
 * LICENSE file in the root of the source tree. All contributing project authors
 * may be found in the AUTHORS file in the root of the source tree.
 */
#ifndef ZLMEDIAKIT_ASSERT_H
#define ZLMEDIAKIT_ASSERT_H
#include <stdio.h>
#ifndef NDEBUG
//Debug build: replace the standard assert so a failed assertion calls
//Assert_Throw (defined elsewhere in the project) instead of aborting.
#ifdef assert
#undef assert
#endif//assert
#ifdef __cplusplus
extern "C" {
#endif
//failed is non-zero when the asserted expression evaluated to false;
//exp/func/file/line identify the failing assertion site.
extern void Assert_Throw(int failed, const char *exp, const char *func, const char *file, int line);
#ifdef __cplusplus
}
#endif
//NOTE: no trailing semicolon — the macro must expand to a plain expression,
//otherwise `assert(x);` yields `;;` and `if (c) assert(x); else ...` pairs
//the else with the wrong statement.
#define assert(exp) Assert_Throw(!(exp), #exp, __FUNCTION__, __FILE__, __LINE__)
#else
//Release build: assertions compile away, matching the standard assert contract.
//#undef first so a prior <assert.h>/<cassert> inclusion does not trigger a
//macro-redefinition warning/error.
#ifdef assert
#undef assert
#endif//assert
#define assert(e) ((void)0)
#endif//NDEBUG
#endif //ZLMEDIAKIT_ASSERT_H

@ -1 +1 @@
Subproject commit abc08f61bb1250b94d252cfeaea249527912dd3b
Subproject commit 576216c64bf3bcdc5e787da2adb3e169bdd97118

View File

@ -39,6 +39,7 @@ set(MediaServer_Root ${CMAKE_CURRENT_SOURCE_DIR}/3rdpart/media-server)
#
INCLUDE_DIRECTORIES(${ToolKit_Root})
INCLUDE_DIRECTORIES(${MediaKit_Root})
INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/3rdpart)
set(ENABLE_HLS true)
set(ENABLE_OPENSSL true)
@ -57,6 +58,8 @@ if (OPENSSL_FOUND AND ENABLE_OPENSSL)
include_directories(${OPENSSL_INCLUDE_DIR})
add_definitions(-DENABLE_OPENSSL)
list(APPEND LINK_LIB_LIST ${OPENSSL_LIBRARIES})
else()
message(WARNING "openssl未找到，rtmp将不支持flash播放器，https/wss/rtsps/rtmps也将失效")
endif ()
#mysql
@ -104,9 +107,9 @@ if(ENABLE_HLS)
message(STATUS "ENABLE_HLS defined")
add_definitions(-DENABLE_HLS)
include_directories(${MediaServer_Root}/libmpeg/include)
aux_source_directory(${MediaServer_Root}/libmpeg/include src_mpeg)
aux_source_directory(${MediaServer_Root}/libmpeg/source src_mpeg)
include_directories(${MediaServer_Root}/libmpeg/include)
add_library(mpeg STATIC ${src_mpeg})
list(APPEND LINK_LIB_LIST mpeg)
@ -121,13 +124,14 @@ if(ENABLE_MP4)
message(STATUS "ENABLE_MP4 defined")
add_definitions(-DENABLE_MP4)
include_directories(${MediaServer_Root}/libmov/include)
include_directories(${MediaServer_Root}/libflv/include)
aux_source_directory(${MediaServer_Root}/libmov/include src_mov)
aux_source_directory(${MediaServer_Root}/libmov/source src_mov)
include_directories(${MediaServer_Root}/libmov/include)
aux_source_directory(${MediaServer_Root}/libflv/include src_flv)
aux_source_directory(${MediaServer_Root}/libflv/source src_flv)
include_directories(${MediaServer_Root}/libflv/include)
add_library(mov STATIC ${src_mov})
add_library(flv STATIC ${src_flv})
@ -141,10 +145,11 @@ endif()
#rtprtpps/ts
if(ENABLE_RTPPROXY AND ENABLE_HLS)
message(STATUS "ENABLE_RTPPROXY defined")
include_directories(${MediaServer_Root}/librtp/include)
aux_source_directory(${MediaServer_Root}/librtp/include src_rtp)
aux_source_directory(${MediaServer_Root}/librtp/source src_rtp)
aux_source_directory(${MediaServer_Root}/librtp/payload src_rtp)
include_directories(${MediaServer_Root}/librtp/include)
add_library(rtp STATIC ${src_rtp})
add_definitions(-DENABLE_RTPPROXY)
list(APPEND LINK_LIB_LIST rtp)

View File

@ -1,4 +1,4 @@
![logo](https://raw.githubusercontent.com/zlmediakit/ZLMediaKit/master/logo.png)
![logo](https://raw.githubusercontent.com/zlmediakit/ZLMediaKit/master/www/logo.png)
[english readme](https://github.com/xiongziliang/ZLMediaKit/blob/master/README_en.md)
@ -30,22 +30,21 @@
## 功能清单
- RTSP
- RTSP 服务器支持RTMP/MP4转RTSP
- RTSPS 服务器支持亚马逊echo show这样的设备
- RTSP 播放器支持RTSP代理支持生成静音音频
- RTSP 推流客户端与服务器
- RTSP[S]
- RTSP[S] 服务器支持RTMP/MP4/HLS转RTSP[S],支持亚马逊echo show这样的设备
- RTSP[S] 播放器支持RTSP代理支持生成静音音频
- RTSP[S] 推流客户端与服务器
- 支持 `rtp over udp` `rtp over tcp` `rtp over http` `rtp组播` 四种RTP传输方式
- 服务器/客户端完整支持Basic/Digest方式的登录鉴权全异步可配置化的鉴权接口
- 支持H265编码
- 服务器支持RTSP推流(包括`rtp over udp` `rtp over tcp`方式)
- 支持任意编码格式的rtsp推流只是除H264/H265/AAC/G711外无法转协议
- RTMP
- RTMP 播放服务器支持RTSP/MP4转RTMP
- RTMP 发布服务器,支持录制发布流
- RTMP 播放器支持RTMP代理支持生成静音音频
- RTMP 推流客户端
- RTMP[S]
- RTMP[S] 播放服务器支持RTSP/MP4/HLS转RTMP
- RTMP[S] 发布服务器,支持录制发布流
- RTMP[S] 播放器支持RTMP代理支持生成静音音频
- RTMP[S] 推流客户端
- 支持http[s]-flv直播
- 支持websocket-flv直播
- 支持任意编码格式的rtmp推流只是除H264/H265/AAC/G711外无法转协议
@ -55,6 +54,7 @@
- 支持HLS文件生成自带HTTP文件服务器
- 通过cookie追踪技术可以模拟HLS播放为长连接实现丰富的业务逻辑
- 支持完备的HLS用户追踪、播放统计等业务功能可以实现HLS按需拉流等业务
- 支持HLS播发器支持拉流HLS转rtsp/rtmp/mp4
- HTTP[S]
- 服务器支持`目录索引生成`,`文件下载`,`表单提交请求`
@ -71,7 +71,7 @@
- 点播
- 支持录制为FLV/HLS/MP4
- RTSP/RTMP/HTTP-FLV/WS-FLV支持MP4文件点播支持seek
- 其他
- 支持丰富的restful api以及web hook事件
- 支持简单的telnet调试
@ -81,11 +81,15 @@
- 支持按需拉流,无人观看自动关断拉流
- 支持先拉流后推流,提高及时推流画面打开率
- 提供c api sdk
- 支持FFmpeg拉流代理任意格式的流
- 支持http api生成并返回实时截图
## 更新日志
- 2020/5/17 新增支持hls播发器支持hls拉流代理
## 编译以及测试
请参考wiki:[快速开始](https://github.com/xiongziliang/ZLMediaKit/wiki/%E5%BF%AB%E9%80%9F%E5%BC%80%E5%A7%8B)
**编译前务必仔细参考wiki:[快速开始](https://github.com/xiongziliang/ZLMediaKit/wiki/%E5%BF%AB%E9%80%9F%E5%BC%80%E5%A7%8B)操作!!!**
## 怎么使用
@ -114,8 +118,12 @@ bash build_docker_images.sh
- [IOS摄像头实时录制,生成rtsp/rtmp/hls/http-flv](https://gitee.com/xiahcu/IOSMedia)
- [IOS rtmp/rtsp播放器视频推流器](https://gitee.com/xiahcu/IOSPlayer)
- [支持linux、windows、mac的rtmp/rtsp播放器](https://github.com/xiongziliang/ZLMediaPlayer)
- [配套的管理WEB网站](https://github.com/chenxiaolei/ZLMediaKit_NVR_UI)
- [基于ZLMediaKit分支的管理WEB网站](https://github.com/chenxiaolei/ZLMediaKit_NVR_UI)
- [基于ZLMediaKit主线的管理WEB网站](https://gitee.com/kkkkk5G/MediaServerUI)
- [DotNetCore的RESTful客户端](https://github.com/MingZhuLiu/ZLMediaKit.DotNetCore.Sdk)
- [GB28181-2016网络视频平台](https://github.com/swwheihei/wvp)
- [node-js版本的GB28181平台](https://gitee.com/hfwudao/GB28181_Node_Http)
## 授权协议

View File

@ -1,4 +1,4 @@
![logo](https://raw.githubusercontent.com/zlmediakit/ZLMediaKit/master/logo.png)
![logo](https://raw.githubusercontent.com/zlmediakit/ZLMediaKit/master/www/logo.png)
# A lightweight ,high performance and stable stream server and client framework based on C++11.
@ -15,18 +15,18 @@
## Features
- RTSP
- RTSP[S]
- RTSP[S] server,support rtsp push.
- RTSP player and pusher.
- RTSP[S] player and pusher.
- RTP Transport : `rtp over udp` `rtp over tcp` `rtp over http` `rtp udp multicast` .
- Basic/Digest/Url Authentication.
- H264/H265/AAC/G711 codec.
- Recorded as mp4.
- Vod of mp4.
- RTMP
- RTMP server,support player and pusher.
- RTMP player and pusher.
- RTMP[S]
- RTMP[S] server,support player and pusher.
- RTMP[S] player and pusher.
- Support HTTP-FLV player.
- H264/H265/AAC/G711 codec.
- Recorded as flv or mp4.
@ -36,6 +36,7 @@
- HLS
- RTSP RTMP can be converted into HLS,built-in HTTP server.
- Play authentication based on cookie.
- Support HLS player, support streaming HLS proxy to RTSP / RTMP / MP4.
- HTTP[S]
- HTTP server,support directory menu、RESTful http api.
@ -53,6 +54,7 @@
- Play and push authentication.
- Pull stream on Demand.
- Support TS / PS streaming push through RTP,and it can be converted to RTSP / RTMP / HLS / FLV.
- Support real-time online screenshot http api.
- Protocol conversion:
@ -67,6 +69,7 @@
| RTMP --> MP4 | Y | Y | Y | N |
| MP4 --> RTSP[S] | Y | Y | Y | N |
| MP4 --> RTMP | Y | Y | Y | N |
| HLS --> RTSP/RTMP/MP4 | Y | Y | Y | N |
- Stream generation
@ -106,7 +109,7 @@
| RTMP Pusher | Y |
| HTTP[S] | Y |
| WebSocket[S] | Y |
| HLS player | Y |
## System Requirements

View File

@ -17,12 +17,12 @@
#ifndef MediaKitApi_STATIC
#if defined(MediaKitApi_EXPORTS)
#define API_EXPORT __declspec(dllexport)
#else
#define API_EXPORT __declspec(dllimport)
#endif
#define API_EXPORT __declspec(dllexport)
#else
#define API_EXPORT __declspec(dllimport)
#endif
#define API_CALL __cdecl
#define API_CALL __cdecl
#else
#define API_EXPORT
#define API_CALL

View File

@ -36,6 +36,22 @@ API_EXPORT mk_thread API_CALL mk_thread_from_tcp_session(mk_tcp_session ctx);
*/
API_EXPORT mk_thread API_CALL mk_thread_from_tcp_client(mk_tcp_client ctx);
/**
* 线线
* 线线
* 线io事件线程
* @return 线
*/
API_EXPORT mk_thread API_CALL mk_thread_from_pool();
/**
* 线线
* 线线
* ZLMediaKit中后台线程用于dns解析mp4点播时的文件解复用
* @return 线
*/
API_EXPORT mk_thread API_CALL mk_thread_from_pool_work();
///////////////////////////////////////////线程切换/////////////////////////////////////////////
typedef void (API_CALL *on_mk_async)(void *user_data);

View File

@ -144,10 +144,12 @@ API_EXPORT uint16_t API_CALL mk_tcp_server_start(uint16_t port, mk_tcp_type type
s_tcp_server[type]->start<TcpSessionWithSSL<TcpSessionForC> >(port);
break;
case mk_type_ws:
s_tcp_server[type]->start<WebSocketSession<TcpSessionForC, HttpSession>>(port);
//此处你也可以修改WebSocketHeader::BINARY
s_tcp_server[type]->start<WebSocketSession<TcpSessionForC, HttpSession, WebSocketHeader::TEXT> >(port);
break;
case mk_type_wss:
s_tcp_server[type]->start<WebSocketSession<TcpSessionForC, HttpsSession>>(port);
//此处你也可以修改WebSocketHeader::BINARY
s_tcp_server[type]->start<WebSocketSession<TcpSessionForC, HttpsSession, WebSocketHeader::TEXT> >(port);
break;
default:
return 0;
@ -208,8 +210,10 @@ TcpClientForC::Ptr *mk_tcp_client_create_l(mk_tcp_client_events *events, mk_tcp_
case mk_type_ssl:
return (TcpClientForC::Ptr *)new shared_ptr<TcpSessionWithSSL<TcpClientForC> >(new TcpSessionWithSSL<TcpClientForC>(events));
case mk_type_ws:
//此处你也可以修改WebSocketHeader::BINARY
return (TcpClientForC::Ptr *)new shared_ptr<WebSocketClient<TcpClientForC, WebSocketHeader::TEXT, false> >(new WebSocketClient<TcpClientForC, WebSocketHeader::TEXT, false>(events));
case mk_type_wss:
//此处你也可以修改WebSocketHeader::BINARY
return (TcpClientForC::Ptr *)new shared_ptr<WebSocketClient<TcpClientForC, WebSocketHeader::TEXT, true> >(new WebSocketClient<TcpClientForC, WebSocketHeader::TEXT, true>(events));
default:
return nullptr;

View File

@ -12,6 +12,7 @@
#include "mk_tcp_private.h"
#include "Util/logger.h"
#include "Poller/EventPoller.h"
#include "Thread/WorkThreadPool.h"
using namespace std;
using namespace toolkit;
@ -27,6 +28,14 @@ API_EXPORT mk_thread API_CALL mk_thread_from_tcp_client(mk_tcp_client ctx){
return (*client)->getPoller().get();
}
API_EXPORT mk_thread API_CALL mk_thread_from_pool(){
return EventPollerPool::Instance().getPoller().get();
}
API_EXPORT mk_thread API_CALL mk_thread_from_pool_work(){
return WorkThreadPool::Instance().getPoller().get();
}
API_EXPORT void API_CALL mk_async_do(mk_thread ctx,on_mk_async cb, void *user_data){
assert(ctx && cb);
EventPoller *poller = (EventPoller *)ctx;

View File

@ -4,12 +4,18 @@ apiDebug=1
#一些比较敏感的http api在访问时需要提供secret否则无权限调用
#如果是通过127.0.0.1访问,那么可以不提供secret
secret=035c73f7-bb6b-4889-a715-d9eb2d1925cc
#截图保存路径根目录截图通过http api(/index/api/getSnap)生成和获取
snapRoot=./www/snap/
#默认截图图片在启动FFmpeg截图后但是截图还未生成时可以返回默认的预设图片
defaultSnap=./www/logo.png
[ffmpeg]
#FFmpeg可执行程序绝对路径
bin=/usr/local/bin/ffmpeg
#FFmpeg拉流再推流的命令模板通过该模板可以设置再编码的一些参数
cmd=%s -re -i %s -c:a aac -strict -2 -ar 44100 -ab 48k -c:v libx264 -f flv %s
#FFmpeg生成截图的命令可以通过修改该配置改变截图分辨率或质量
snap=%s -i %s -y -f mjpeg -t 0.001 %s
#FFmpeg日志的路径如果置空则不生成FFmpeg日志
#可以为相对(相对于本可执行程序目录)或绝对路径
log=./ffmpeg/ffmpeg.log
@ -43,6 +49,11 @@ publishToMP4=0
#合并写缓存大小(单位毫秒)合并写指服务器缓存一定的数据后才会一次性写入socket这样能提高性能但是会提高延时
#开启后会同时关闭TCP_NODELAY并开启MSG_MORE
mergeWriteMS=0
#全局的时间戳覆盖开关在转协议时对frame进行时间戳覆盖
#该开关对rtsp/rtmp/rtp推流、rtsp/rtmp/hls拉流代理转协议时生效
#会直接影响rtsp/rtmp/hls/mp4/flv等协议的时间戳
#同协议情况下不影响(例如rtsp/rtmp推流那么播放rtsp/rtmp时不会影响时间戳)
modifyStamp=0
[hls]
#hls写文件的buf大小调整参数可以提高文件io性能
@ -76,8 +87,9 @@ on_publish=https://127.0.0.1/index/hook/on_publish
on_record_mp4=https://127.0.0.1/index/hook/on_record_mp4
#rtsp播放鉴权事件此事件中比对rtsp的用户名密码
on_rtsp_auth=https://127.0.0.1/index/hook/on_rtsp_auth
#rtsp播放是否开启鉴权事件置空则关闭rtsp鉴权。rtsp播放鉴权还支持url方式鉴权
#rtsp播放是否开启专属鉴权事件置空则关闭rtsp鉴权。rtsp播放鉴权还支持url方式鉴权
#建议开发者统一采用url参数方式鉴权rtsp用户名密码鉴权一般在设备上用的比较多
#开启rtsp专属鉴权后将不再触发on_play鉴权事件
on_rtsp_realm=https://127.0.0.1/index/hook/on_rtsp_realm
#远程telnet调试鉴权事件
on_shell_login=https://127.0.0.1/index/hook/on_shell_login

View File

@ -13,26 +13,27 @@
#include "Common/MediaSource.h"
#include "Util/File.h"
#include "System.h"
#include "Thread/WorkThreadPool.h"
namespace FFmpeg {
#define FFmpeg_FIELD "ffmpeg."
const string kBin = FFmpeg_FIELD"bin";
const string kCmd = FFmpeg_FIELD"cmd";
const string kLog = FFmpeg_FIELD"log";
const string kSnap = FFmpeg_FIELD"snap";
onceToken token([]() {
#ifdef _WIN32
string ffmpeg_bin = System::execute("where ffmpeg");
//windows下先关闭FFmpeg日志(目前不支持日志重定向)
mINI::Instance()[kCmd] = "%s -re -i \"%s\" -loglevel quiet -c:a aac -strict -2 -ar 44100 -ab 48k -c:v libx264 -f flv %s ";
string ffmpeg_bin = trim(System::execute("where ffmpeg"));
#else
string ffmpeg_bin = System::execute("which ffmpeg");
mINI::Instance()[kCmd] = "%s -re -i \"%s\" -c:a aac -strict -2 -ar 44100 -ab 48k -c:v libx264 -f flv %s ";
string ffmpeg_bin = trim(System::execute("which ffmpeg"));
#endif
//默认ffmpeg命令路径为环境变量中路径
mINI::Instance()[kBin] = ffmpeg_bin.empty() ? "ffmpeg" : ffmpeg_bin;
//ffmpeg日志保存路径
mINI::Instance()[kLog] = "./ffmpeg/ffmpeg.log";
mINI::Instance()[kCmd] = "%s -re -i %s -c:a aac -strict -2 -ar 44100 -ab 48k -c:v libx264 -f flv %s";
mINI::Instance()[kSnap] = "%s -i %s -y -f mjpeg -t 0.001 %s";
});
}
@ -114,8 +115,7 @@ void FFmpegSource::findAsync(int maxWaitMS, const function<void(const MediaSourc
auto src = MediaSource::find(_media_info._schema,
_media_info._vhost,
_media_info._app,
_media_info._streamid,
false);
_media_info._streamid);
if(src || !maxWaitMS){
cb(src);
return;
@ -196,7 +196,19 @@ void FFmpegSource::startTimer(int timeout_ms) {
//推流给其他服务器的我们通过判断FFmpeg进程是否在线如果FFmpeg推流中断那么它应该会自动退出
if (!strongSelf->_process.wait(false)) {
//ffmpeg不在线重新拉流
strongSelf->play(strongSelf->_src_url, strongSelf->_dst_url, timeout_ms, [](const SockException &) {});
strongSelf->play(strongSelf->_src_url, strongSelf->_dst_url, timeout_ms, [weakSelf](const SockException &ex) {
if(!ex){
//没有错误
return;
}
auto strongSelf = weakSelf.lock();
if (!strongSelf) {
//自身已经销毁
return;
}
//上次重试时间超过10秒那么再重试FFmpeg拉流
strongSelf->startTimer(10 * 1000);
});
}
}
return true;
@ -232,3 +244,31 @@ void FFmpegSource::onGetMediaSource(const MediaSource::Ptr &src) {
_listener = src->getListener();
src->setListener(shared_from_this());
}
void FFmpegSnap::makeSnap(const string &play_url, const string &save_path, float timeout_sec, const function<void(bool)> &cb) {
GET_CONFIG(string,ffmpeg_bin,FFmpeg::kBin);
GET_CONFIG(string,ffmpeg_snap,FFmpeg::kSnap);
GET_CONFIG(string,ffmpeg_log,FFmpeg::kLog);
std::shared_ptr<Process> process = std::make_shared<Process>();
auto delayTask = EventPollerPool::Instance().getPoller()->doDelayTask(timeout_sec * 1000,[process,cb](){
if(process->wait(false)){
//FFmpeg进程还在运行超时就关闭它
process->kill(2000);
}
return 0;
});
WorkThreadPool::Instance().getPoller()->async([process,play_url,save_path,delayTask,cb](){
char cmd[1024] = {0};
snprintf(cmd, sizeof(cmd),ffmpeg_snap.data(),ffmpeg_bin.data(),play_url.data(),save_path.data());
process->run(cmd,ffmpeg_log.empty() ? "" : File::absolutePath("",ffmpeg_log));
//等待FFmpeg进程退出
process->wait(true);
//FFmpeg进程退出了可以取消定时器了
delayTask->cancel();
//执行回调函数
cb(process->exit_code() == 0);
});
}

View File

@ -23,6 +23,23 @@ using namespace std;
using namespace toolkit;
using namespace mediakit;
namespace FFmpeg {
extern const string kSnap;
}
class FFmpegSnap {
public:
/// 创建截图
/// \param play_url 播放url地址只要FFmpeg支持即可
/// \param save_path 截图jpeg文件保存路径
/// \param timeout_sec 生成截图超时时间(防止阻塞太久)
/// \param cb 生成截图成功与否回调
static void makeSnap(const string &play_url, const string &save_path, float timeout_sec, const function<void(bool)> &cb);
private:
FFmpegSnap() = delete;
~FFmpegSnap() = delete;
};
class FFmpegSource : public std::enable_shared_from_this<FFmpegSource> , public MediaSourceEvent{
public:
typedef shared_ptr<FFmpegSource> Ptr;

View File

@ -10,13 +10,13 @@
#include <limits.h>
#include <sys/stat.h>
#ifndef _WIN32
#include <sys/resource.h>
#include <unistd.h>
#else
//#include <TlHelp32.h>
#include <windows.h>
#include <io.h>
#endif
#include <stdexcept>
@ -32,68 +32,83 @@ using namespace toolkit;
void Process::run(const string &cmd, const string &log_file_tmp) {
kill(2000);
#ifdef _WIN32
STARTUPINFO si;
PROCESS_INFORMATION pi;
ZeroMemory(&si, sizeof(si)); //结构体初始化;
ZeroMemory(&pi, sizeof(pi));
STARTUPINFO si = {0};
PROCESS_INFORMATION pi = {0};
string log_file;
if (log_file_tmp.empty()) {
//未指定子进程日志文件时,重定向至/dev/null
log_file = "NUL";
} else {
log_file = StrPrinter << log_file_tmp << "." << getCurrentMillisecond();
}
//重定向shell日志至文件
auto fp = File::create_file(log_file.data(), "ab");
if (!fp) {
fprintf(stderr, "open log file %s failed:%d(%s)\r\n", log_file.data(), get_uv_error(), get_uv_errmsg());
} else {
auto log_fd = (HANDLE)(_get_osfhandle(fileno(fp)));
// dup to stdout and stderr.
si.wShowWindow = SW_HIDE;
// STARTF_USESHOWWINDOW:The wShowWindow member contains additional information.
// STARTF_USESTDHANDLES:The hStdInput, hStdOutput, and hStdError members contain additional information.
si.dwFlags = STARTF_USESHOWWINDOW | STARTF_USESTDHANDLES;
si.hStdError = log_fd;
si.hStdOutput = log_fd;
}
LPTSTR lpDir = const_cast<char*>(cmd.data());
if (CreateProcess(NULL, lpDir, NULL, NULL, FALSE, 0, NULL, NULL, &si, &pi)){
if (CreateProcess(NULL, lpDir, NULL, NULL, TRUE, 0, NULL, NULL, &si, &pi)){
//下面两行关闭句柄,解除本进程和新进程的关系,不然有可能 不小心调用TerminateProcess函数关掉子进程
CloseHandle(pi.hProcess);
CloseHandle(pi.hThread);
_pid = pi.dwProcessId;
InfoL << "start child proces " << _pid;
_handle = pi.hProcess;
fprintf(fp, "\r\n\r\n#### pid=%d,cmd=%s #####\r\n\r\n", _pid, cmd.data());
InfoL << "start child process " << _pid << ", log file:" << log_file;
} else {
WarnL << "start child proces fail: " << GetLastError();
WarnL << "start child process fail: " << get_uv_errmsg();
}
#else
fclose(fp);
#else
_pid = fork();
if (_pid < 0) {
throw std::runtime_error(StrPrinter << "fork child process falied,err:" << get_uv_errmsg());
throw std::runtime_error(StrPrinter << "fork child process failed,err:" << get_uv_errmsg());
}
if (_pid == 0) {
string log_file;
if (log_file_tmp.empty()) {
//未指定子进程日志文件时,重定向至/dev/null
log_file = "/dev/null";
} else {
log_file = StrPrinter << log_file_tmp << "." << getpid();
}
//子进程关闭core文件生成
struct rlimit rlim = { 0,0 };
struct rlimit rlim = {0, 0};
setrlimit(RLIMIT_CORE, &rlim);
//在启动子进程时暂时禁用SIGINT、SIGTERM信号
// ignore the SIGINT and SIGTERM
signal(SIGINT, SIG_IGN);
signal(SIGTERM, SIG_IGN);
string log_file;
if (log_file_tmp.empty()) {
log_file = "/dev/null";
}
else {
log_file = StrPrinter << log_file_tmp << "." << getpid();
}
int log_fd = -1;
int flags = O_CREAT | O_WRONLY | O_APPEND;
mode_t mode = S_IRWXO | S_IRWXG | S_IRWXU;// S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH;
File::create_path(log_file.data(), mode);
if ((log_fd = ::open(log_file.c_str(), flags, mode)) < 0) {
fprintf(stderr, "open log file %s failed:%d(%s)\r\n", log_file.data(), errno, strerror(errno));
}
else {
//重定向shell日志至文件
auto fp = File::create_file(log_file.data(), "ab");
if (!fp) {
fprintf(stderr, "open log file %s failed:%d(%s)\r\n", log_file.data(), get_uv_error(), get_uv_errmsg());
} else {
auto log_fd = fileno(fp);
// dup to stdout and stderr.
if (dup2(log_fd, STDOUT_FILENO) < 0) {
fprintf(stderr, "dup2 stdout file %s failed:%d(%s)\r\n", log_file.data(), errno, strerror(errno));
fprintf(stderr, "dup2 stdout file %s failed:%d(%s)\r\n", log_file.data(), get_uv_error(), get_uv_errmsg());
}
if (dup2(log_fd, STDERR_FILENO) < 0) {
fprintf(stderr, "dup2 stderr file %s failed:%d(%s)\r\n", log_file.data(), errno, strerror(errno));
fprintf(stderr, "dup2 stderr file %s failed:%d(%s)\r\n", log_file.data(), get_uv_error(), get_uv_errmsg());
}
// close log fd
::close(log_fd);
// 关闭日志文件
::fclose(fp);
}
fprintf(stderr, "\r\n\r\n#### pid=%d,cmd=%s #####\r\n\r\n", getpid(), cmd.data());
// close other fds
// TODO: do in right way.
//关闭父进程继承的fd
for (int i = 3; i < 1024; i++) {
::close(i);
}
@ -101,9 +116,9 @@ void Process::run(const string &cmd, const string &log_file_tmp) {
auto params = split(cmd, " ");
// memory leak in child process, it's ok.
char **charpv_params = new char *[params.size() + 1];
for (int i = 0; i < (int)params.size(); i++) {
for (int i = 0; i < (int) params.size(); i++) {
std::string &p = params[i];
charpv_params[i] = (char *)p.data();
charpv_params[i] = (char *) p.data();
}
// EOF: NULL
charpv_params[params.size()] = NULL;
@ -111,11 +126,19 @@ void Process::run(const string &cmd, const string &log_file_tmp) {
// TODO: execv or execvp
auto ret = execv(params[0].c_str(), charpv_params);
if (ret < 0) {
fprintf(stderr, "fork process failed, errno=%d(%s)\r\n", errno, strerror(errno));
fprintf(stderr, "fork process failed:%d(%s)\r\n", get_uv_error(), get_uv_errmsg());
}
exit(ret);
}
InfoL << "start child proces " << _pid;
string log_file;
if (log_file_tmp.empty()) {
//未指定子进程日志文件时,重定向至/dev/null
log_file = "/dev/null";
} else {
log_file = StrPrinter << log_file_tmp << "." << _pid;
}
InfoL << "start child process " << _pid << ", log file:" << log_file;
#endif // _WIN32
}
@ -126,24 +149,41 @@ void Process::run(const string &cmd, const string &log_file_tmp) {
* @param block
* @return
*/
static bool s_wait(pid_t pid,int *exit_code_ptr,bool block) {
static bool s_wait(pid_t pid, void *handle, int *exit_code_ptr, bool block) {
if (pid <= 0) {
return false;
}
int status = 0;
#ifdef _WIN32
HANDLE hProcess = NULL;
hProcess = OpenProcess(PROCESS_TERMINATE, FALSE, pid); //打开目标进程
if (hProcess == NULL) {
DWORD code = 0;
if (block) {
//一直等待
code = WaitForSingleObject(handle, INFINITE);
} else {
code = WaitForSingleObject(handle, 0);
}
if(code == WAIT_FAILED || code == WAIT_OBJECT_0){
//子进程已经退出了,获取子进程退出代码
DWORD exitCode = 0;
if(exit_code_ptr && GetExitCodeProcess(handle, &exitCode)){
*exit_code_ptr = exitCode;
}
return false;
}
CloseHandle(hProcess);
if(code == WAIT_TIMEOUT){
//子进程还在线
return true;
}
//不太可能运行到此处
WarnL << "WaitForSingleObject ret:" << code;
return false;
#else
int status = 0;
pid_t p = waitpid(pid, &status, block ? 0 : WNOHANG);
int exit_code = (status & 0xFF00) >> 8;
if (exit_code_ptr) {
*exit_code_ptr = (status & 0xFF00) >> 8;
*exit_code_ptr = exit_code;
}
if (p < 0) {
WarnL << "waitpid failed, pid=" << pid << ", err=" << get_uv_errmsg();
@ -153,26 +193,57 @@ static bool s_wait(pid_t pid,int *exit_code_ptr,bool block) {
InfoL << "process terminated, pid=" << pid << ", exit code=" << exit_code;
return false;
}
#endif // _WIN32
return true;
#endif // _WIN32
}
static void s_kill(pid_t pid,int max_delay,bool force){
#ifdef _WIN32
// Inspired from http://stackoverflow.com/a/15281070/1529139
// and http://stackoverflow.com/q/40059902/1529139
bool signalCtrl(DWORD dwProcessId, DWORD dwCtrlEvent){
bool success = false;
DWORD thisConsoleId = GetCurrentProcessId();
// Leave current console if it exists
// (otherwise AttachConsole will return ERROR_ACCESS_DENIED)
bool consoleDetached = (FreeConsole() != FALSE);
if (AttachConsole(dwProcessId) != FALSE){
// Add a fake Ctrl-C handler for avoid instant kill is this console
// WARNING: do not revert it or current program will be also killed
SetConsoleCtrlHandler(nullptr, true);
success = (GenerateConsoleCtrlEvent(dwCtrlEvent, 0) != FALSE);
FreeConsole();
}
if (consoleDetached){
// Create a new console if previous was deleted by OS
if (AttachConsole(thisConsoleId) == FALSE){
int errorCode = GetLastError();
if (errorCode == 31){
// 31=ERROR_GEN_FAILURE
AllocConsole();
}
}
}
return success;
}
#endif // _WIN32
static void s_kill(pid_t pid, void *handle, int max_delay, bool force) {
if (pid <= 0) {
//pid无效
return;
}
#ifdef _WIN32
HANDLE hProcess = NULL;
hProcess = OpenProcess(PROCESS_TERMINATE, FALSE, pid); //打开目标进程
if (hProcess == NULL) {
WarnL << "\nOpen Process fAiled: " << GetLastError();
return;
}
DWORD ret = TerminateProcess(hProcess, 0); //结束目标进程
if (ret == 0) {
WarnL << GetLastError;
//windows下目前没有比较好的手段往子进程发送SIGTERM或信号
//所以杀死子进程的方式全部强制为立即关闭
force = true;
if(force){
//强制关闭子进程
TerminateProcess(handle, 0);
}else{
//非强制关闭发送Ctr+C信号
signalCtrl(pid, CTRL_C_EVENT);
}
#else
if (::kill(pid, force ? SIGKILL : SIGTERM) == -1) {
@ -182,33 +253,38 @@ static void s_kill(pid_t pid,int max_delay,bool force){
}
#endif // _WIN32
if(force){
if (force) {
//发送SIGKILL信号后阻塞等待退出
s_wait(pid, NULL, true);
s_wait(pid, handle, nullptr, true);
DebugL << "force kill " << pid << " success!";
return;
}
//发送SIGTERM信号后2秒后检查子进程是否已经退出
WorkThreadPool::Instance().getPoller()->doDelayTask(max_delay,[pid](){
if (!s_wait(pid, nullptr, false)) {
WorkThreadPool::Instance().getPoller()->doDelayTask(max_delay, [pid, handle]() {
if (!s_wait(pid, handle, nullptr, false)) {
//进程已经退出了
return 0;
}
//进程还在运行
WarnL << "process still working,force kill it:" << pid;
s_kill(pid,0, true);
s_kill(pid, handle, 0, true);
return 0;
});
}
void Process::kill(int max_delay,bool force) {
void Process::kill(int max_delay, bool force) {
if (_pid <= 0) {
return;
}
s_kill(_pid,max_delay,force);
s_kill(_pid, _handle, max_delay, force);
_pid = -1;
#ifdef _WIN32
if(_handle){
CloseHandle(_handle);
_handle = nullptr;
}
#endif
}
Process::~Process() {
@ -218,7 +294,7 @@ Process::~Process() {
Process::Process() {}
bool Process::wait(bool block) {
return s_wait(_pid,&_exit_code,block);
return s_wait(_pid, _handle, &_exit_code, block);
}
int Process::exit_code() {

View File

@ -31,6 +31,7 @@ public:
int exit_code();
private:
pid_t _pid = -1;
void *_handle = nullptr;
int _exit_code = 0;
};

View File

@ -52,7 +52,7 @@ string System::execute(const string &cmd) {
#if !defined(ANDROID) && !defined(_WIN32)
static string addr2line(const string &address) {
string cmd = StrPrinter << "addr2line -e " << exePath() << " " << address;
string cmd = StrPrinter << "addr2line -C -f -e " << exePath() << " " << address;
return System::execute(cmd);
}

View File

@ -8,11 +8,12 @@
* may be found in the AUTHORS file in the root of the source tree.
*/
#include <sys/stat.h>
#include <math.h>
#include <signal.h>
#include <functional>
#include <sstream>
#include <unordered_map>
#include <math.h>
#include "jsoncpp/json.h"
#include "Util/util.h"
#include "Util/logger.h"
@ -50,10 +51,14 @@ typedef enum {
#define API_FIELD "api."
const string kApiDebug = API_FIELD"apiDebug";
const string kSecret = API_FIELD"secret";
const string kSnapRoot = API_FIELD"snapRoot";
const string kDefaultSnap = API_FIELD"defaultSnap";
static onceToken token([]() {
mINI::Instance()[kApiDebug] = "1";
mINI::Instance()[kSecret] = "035c73f7-bb6b-4889-a715-d9eb2d1925cc";
mINI::Instance()[kSnapRoot] = "./www/snap/";
mINI::Instance()[kDefaultSnap] = "./www/logo.png";
});
}//namespace API
@ -145,7 +150,6 @@ static inline void addHttpListener(){
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastHttpRequest, [](BroadcastHttpRequestArgs) {
auto it = s_map_api.find(parser.Url());
if (it == s_map_api.end()) {
consumed = false;
return;
}
//该api已被消费
@ -174,7 +178,7 @@ static inline void addHttpListener(){
size = body->remainSize();
}
if(size < 4 * 1024){
if(size && size < 4 * 1024){
string contentOut = body->readData(size)->toString();
DebugL << "\r\n# request:\r\n" << parser.Method() << " " << parser.FullUrl() << "\r\n"
<< "# content:\r\n" << parser.Content() << "\r\n"
@ -436,14 +440,14 @@ void installWebApi() {
api_regist1("/index/api/isMediaOnline",[](API_ARGS1){
CHECK_SECRET();
CHECK_ARGS("schema","vhost","app","stream");
val["online"] = (bool) (MediaSource::find(allArgs["schema"],allArgs["vhost"],allArgs["app"],allArgs["stream"],false));
val["online"] = (bool) (MediaSource::find(allArgs["schema"],allArgs["vhost"],allArgs["app"],allArgs["stream"]));
});
//测试url http://127.0.0.1/index/api/getMediaInfo?schema=rtsp&vhost=__defaultVhost__&app=live&stream=obs
api_regist1("/index/api/getMediaInfo",[](API_ARGS1){
CHECK_SECRET();
CHECK_ARGS("schema","vhost","app","stream");
auto src = MediaSource::find(allArgs["schema"],allArgs["vhost"],allArgs["app"],allArgs["stream"],false);
auto src = MediaSource::find(allArgs["schema"],allArgs["vhost"],allArgs["app"],allArgs["stream"]);
if(!src){
val["online"] = false;
return;
@ -817,6 +821,78 @@ void installWebApi() {
val["data"]["paths"] = paths;
});
static auto responseSnap = [](const string &snap_path,
const HttpSession::KeyValue &headerIn,
const HttpSession::HttpResponseInvoker &invoker) {
StrCaseMap headerOut;
struct stat statbuf = {0};
GET_CONFIG(string, defaultSnap, API::kDefaultSnap);
if (!(stat(snap_path.data(), &statbuf) == 0 && statbuf.st_size != 0) && !defaultSnap.empty()) {
//空文件且设置了预设图,则返回预设图片(也就是FFmpeg生成截图中空档期的默认图片)
const_cast<string&>(snap_path) = File::absolutePath(defaultSnap, "");
headerOut["Content-Type"] = HttpFileManager::getContentType(snap_path.data());
} else {
//之前生成的截图文件我们默认为jpeg格式
headerOut["Content-Type"] = HttpFileManager::getContentType(".jpeg");
}
//返回图片给http客户端
invoker.responseFile(headerIn, headerOut, snap_path);
};
//获取截图缓存或者实时截图
//http://127.0.0.1/index/api/getSnap?url=rtmp://127.0.0.1/record/robot.mp4&timeout_sec=10&expire_sec=3
api_regist2("/index/api/getSnap", [](API_ARGS2){
CHECK_SECRET();
CHECK_ARGS("url", "timeout_sec", "expire_sec");
GET_CONFIG(string, snap_root, API::kSnapRoot);
int expire_sec = allArgs["expire_sec"];
auto scan_path = File::absolutePath(MD5(allArgs["url"]).hexdigest(), snap_root) + "/";
string snap_path;
File::scanDir(scan_path, [&](const string &path, bool isDir) {
if (isDir) {
//忽略文件夹
return true;
}
//找到截图
auto tm = FindField(path.data() + scan_path.size(), nullptr, ".jpeg");
if (atoll(tm.data()) + expire_sec < time(NULL)) {
//截图已经过期,删除之,后面重新生成
File::delete_file(path.data());
return true;
}
//截图未过期,中断遍历,返回上次生成的截图
snap_path = path;
return false;
});
if(!snap_path.empty()){
responseSnap(snap_path, headerIn, invoker);
return;
}
//无截图或者截图已经过期
snap_path = StrPrinter << scan_path << time(NULL) << ".jpeg";
//生成一个空文件,目的是顺便创建文件夹路径,
//同时防止在FFmpeg生成截图途中不停的尝试调用该api启动FFmpeg生成相同的截图
auto file = File::create_file(snap_path.data(), "wb");
if (file) {
fclose(file);
}
//启动FFmpeg进程开始截图
FFmpegSnap::makeSnap(allArgs["url"],snap_path,allArgs["timeout_sec"],[invoker,headerIn,snap_path](bool success){
if(!success){
//生成截图失败,可能残留空文件
File::delete_file(snap_path.data());
}
responseSnap(snap_path, headerIn, invoker);
});
});
////////////以下是注册的Hook API////////////
api_regist1("/index/hook/on_publish",[](API_ARGS1){
//开始推流事件

View File

@ -78,14 +78,6 @@ void DevChannel::inputH264(const char *data, int len, uint32_t dts, uint32_t pts
if(pts == 0){
pts = dts;
}
int prefixeSize;
if (memcmp("\x00\x00\x00\x01", data, 4) == 0) {
prefixeSize = 4;
} else if (memcmp("\x00\x00\x01", data, 3) == 0) {
prefixeSize = 3;
} else {
prefixeSize = 0;
}
//由于rtmp/hls/mp4需要缓存时间戳相同的帧
//所以使用FrameNoCacheAble类型的帧反而会在转换成FrameCacheAble时多次内存拷贝
@ -93,9 +85,8 @@ void DevChannel::inputH264(const char *data, int len, uint32_t dts, uint32_t pts
H264Frame::Ptr frame = std::make_shared<H264Frame>();
frame->_dts = dts;
frame->_pts = pts;
frame->_buffer.assign("\x00\x00\x00\x01",4);
frame->_buffer.append(data + prefixeSize, len - prefixeSize);
frame->_prefix_size = 4;
frame->_buffer.assign(data, len);
frame->_prefix_size = prefixSize(data,len);
inputFrame(frame);
}
@ -106,14 +97,6 @@ void DevChannel::inputH265(const char *data, int len, uint32_t dts, uint32_t pts
if(pts == 0){
pts = dts;
}
int prefixeSize;
if (memcmp("\x00\x00\x00\x01", data, 4) == 0) {
prefixeSize = 4;
} else if (memcmp("\x00\x00\x01", data, 3) == 0) {
prefixeSize = 3;
} else {
prefixeSize = 0;
}
//由于rtmp/hls/mp4需要缓存时间戳相同的帧
//所以使用FrameNoCacheAble类型的帧反而会在转换成FrameCacheAble时多次内存拷贝
@ -121,9 +104,8 @@ void DevChannel::inputH265(const char *data, int len, uint32_t dts, uint32_t pts
H265Frame::Ptr frame = std::make_shared<H265Frame>();
frame->_dts = dts;
frame->_pts = pts;
frame->_buffer.assign("\x00\x00\x00\x01",4);
frame->_buffer.append(data + prefixeSize, len - prefixeSize);
frame->_prefix_size = 4;
frame->_buffer.assign(data, len);
frame->_prefix_size = prefixSize(data,len);
inputFrame(frame);
}
@ -163,7 +145,9 @@ void DevChannel::inputG711(const char *data, int len, uint32_t dts){
if (dts == 0) {
dts = (uint32_t)_aTicker[1].elapsedTime();
}
inputFrame(std::make_shared<G711FrameNoCacheAble>(_audio->codecId, (char*)data, len, dts, 0));
auto frame = std::make_shared<G711FrameNoCacheAble>((char*)data, len, dts, 0);
frame->setCodec(_audio->codecId);
inputFrame(frame);
}
void DevChannel::initVideo(const VideoInfo &info) {

View File

@ -43,9 +43,9 @@ public:
class AudioInfo {
public:
CodecId codecId = CodecAAC;
int iChannel;
int iSampleBit;
int iSampleRate;
int iChannel;
int iSampleBit;
int iSampleRate;
};
/**

View File

@ -180,9 +180,8 @@ static void eraseIfEmpty(MAP &map, IT0 it0, IT1 it1, IT2 it2) {
}
};
void findAsync_l(const MediaInfo &info, const std::shared_ptr<TcpSession> &session, bool retry,
const function<void(const MediaSource::Ptr &src)> &cb){
auto src = MediaSource::find(info._schema, info._vhost, info._app, info._streamid, true);
void MediaSource::findAsync_l(const MediaInfo &info, const std::shared_ptr<TcpSession> &session, bool retry, const function<void(const MediaSource::Ptr &src)> &cb){
auto src = MediaSource::find_l(info._schema, info._vhost, info._app, info._streamid, true);
if(src || !retry){
cb(src);
return;
@ -248,7 +247,11 @@ void MediaSource::findAsync(const MediaInfo &info, const std::shared_ptr<TcpSess
return findAsync_l(info, session, true, cb);
}
MediaSource::Ptr MediaSource::find(const string &schema, const string &vhost_tmp, const string &app, const string &id, bool bMake) {
MediaSource::Ptr MediaSource::find(const string &schema, const string &vhost, const string &app, const string &id) {
return find_l(schema, vhost, app, id, false);
}
MediaSource::Ptr MediaSource::find_l(const string &schema, const string &vhost_tmp, const string &app, const string &id, bool bMake) {
string vhost = vhost_tmp;
if(vhost.empty()){
vhost = DEFAULT_VHOST;
@ -419,12 +422,10 @@ void MediaSourceEvent::onNoneReader(MediaSource &sender){
//如果mp4点播, 无人观看时我们强制关闭点播
bool is_mp4_vod = sender.getApp() == recordApp;
//无人观看mp4点播时3秒后自动关闭
auto close_delay = is_mp4_vod ? 3.0 : stream_none_reader_delay / 1000.0;
//没有任何人观看该视频源,表明该源可以关闭了
weak_ptr<MediaSource> weakSender = sender.shared_from_this();
_async_close_timer = std::make_shared<Timer>(close_delay, [weakSender,is_mp4_vod]() {
_async_close_timer = std::make_shared<Timer>(stream_none_reader_delay / 1000.0, [weakSender,is_mp4_vod]() {
auto strongSender = weakSender.lock();
if (!strongSender) {
//对象已经销毁
@ -467,7 +468,7 @@ MediaSource::Ptr MediaSource::createFromMP4(const string &schema, const string &
try {
MP4Reader::Ptr pReader(new MP4Reader(vhost, app, stream, filePath));
pReader->startReadMP4();
return MediaSource::find(schema, vhost, app, stream, false);
return MediaSource::find(schema, vhost, app, stream);
} catch (std::exception &ex) {
WarnL << ex.what();
return nullptr;
@ -478,57 +479,51 @@ MediaSource::Ptr MediaSource::createFromMP4(const string &schema, const string &
#endif //ENABLE_MP4
}
static bool isFlushAble_default(bool is_audio, uint32_t last_stamp, uint32_t new_stamp, int cache_size) {
if (new_stamp < last_stamp) {
//时间戳回退(可能seek中)
static bool isFlushAble_default(bool is_video, uint32_t last_stamp, uint32_t new_stamp, int cache_size) {
if (new_stamp + 500 < last_stamp) {
//时间戳回退比较大(可能seek中)由于rtp中时间戳是pts是可能存在一定程度的回退的
return true;
}
if (!is_audio) {
//这是视频,时间戳发送变化或者缓存超过1024个
return last_stamp != new_stamp || cache_size >= 1024;
}
//这是音频,缓存超过100ms或者缓存个数超过10个
return new_stamp > last_stamp + 100 || cache_size > 10;
//时间戳发送变化或者缓存超过1024个,sendmsg接口一般最多只能发送1024个数据包
return last_stamp != new_stamp || cache_size >= 1024;
}
static bool isFlushAble_merge(bool is_audio, uint32_t last_stamp, uint32_t new_stamp, int cache_size, int merge_ms) {
if (new_stamp < last_stamp) {
//时间戳回退(可能seek中)
static bool isFlushAble_merge(bool is_video, uint32_t last_stamp, uint32_t new_stamp, int cache_size, int merge_ms) {
if (new_stamp + 500 < last_stamp) {
//时间戳回退比较大(可能seek中)由于rtp中时间戳是pts是可能存在一定程度的回退的
return true;
}
if(new_stamp > last_stamp + merge_ms){
if (new_stamp > last_stamp + merge_ms) {
//时间戳增量超过合并写阈值
return true;
}
if (!is_audio) {
//这是视频,缓存数超过1024个,这个逻辑用于避免时间戳异常的流导致的内存暴增问题
//而且sendmsg接口一般最多只能发送1024个数据包
return cache_size >= 1024;
}
//这是音频音频缓存超过20个
return cache_size > 20;
//缓存数超过1024个,这个逻辑用于避免时间戳异常的流导致的内存暴增问题
//而且sendmsg接口一般最多只能发送1024个数据包
return cache_size >= 1024;
}
bool FlushPolicy::isFlushAble(uint32_t new_stamp, int cache_size) {
bool ret = false;
GET_CONFIG(int, mergeWriteMS, General::kMergeWriteMS);
if (mergeWriteMS <= 0) {
//关闭了合并写或者合并写阈值小于等于0
ret = isFlushAble_default(_is_audio, _last_stamp, new_stamp, cache_size);
bool FlushPolicy::isFlushAble(bool is_video, bool is_key, uint32_t new_stamp, int cache_size) {
bool flush_flag = false;
if (is_key && is_video) {
//遇到关键帧flush掉前面的数据确保关键帧为该组数据的第一帧确保GOP缓存有效
flush_flag = true;
} else {
ret = isFlushAble_merge(_is_audio, _last_stamp, new_stamp, cache_size, mergeWriteMS);
GET_CONFIG(int, mergeWriteMS, General::kMergeWriteMS);
if (mergeWriteMS <= 0) {
//关闭了合并写或者合并写阈值小于等于0
flush_flag = isFlushAble_default(is_video, _last_stamp[is_video], new_stamp, cache_size);
} else {
flush_flag = isFlushAble_merge(is_video, _last_stamp[is_video], new_stamp, cache_size, mergeWriteMS);
}
}
if (ret) {
// DebugL << _is_audio << " " << _last_stamp << " " << new_stamp;
_last_stamp = new_stamp;
if (flush_flag) {
_last_stamp[is_video] = new_stamp;
}
return ret;
return flush_flag;
}
} /* namespace mediakit */

View File

@ -134,7 +134,7 @@ public:
virtual bool isRecording(Recorder::type type);
// 同步查找流
static Ptr find(const string &schema, const string &vhost, const string &app, const string &id, bool bMake = true) ;
static Ptr find(const string &schema, const string &vhost, const string &app, const string &id);
// 异步查找流
static void findAsync(const MediaInfo &info, const std::shared_ptr<TcpSession> &session, const function<void(const Ptr &src)> &cb);
// 遍历所有流
@ -142,9 +142,14 @@ public:
// 从mp4文件生成MediaSource
static MediaSource::Ptr createFromMP4(const string &schema, const string &vhost, const string &app, const string &stream, const string &filePath = "", bool checkApp = true);
protected:
void regist() ;
bool unregist() ;
bool unregist();
private:
static Ptr find_l(const string &schema, const string &vhost, const string &app, const string &id, bool bMake);
static void findAsync_l(const MediaInfo &info, const std::shared_ptr<TcpSession> &session, bool retry, const function<void(const MediaSource::Ptr &src)> &cb);
private:
string _strSchema;
string _strVhost;
@ -159,10 +164,7 @@ private:
///缓存刷新策略类
class FlushPolicy {
public:
FlushPolicy(bool is_audio) {
_is_audio = is_audio;
};
FlushPolicy() = default;
~FlushPolicy() = default;
uint32_t getStamp(const RtpPacket::Ptr &packet) {
@ -173,45 +175,45 @@ public:
return packet->timeStamp;
}
bool isFlushAble(uint32_t new_stamp, int cache_size);
bool isFlushAble(bool is_video, bool is_key, uint32_t new_stamp, int cache_size);
private:
bool _is_audio;
uint32_t _last_stamp= 0;
uint32_t _last_stamp[2] = {0, 0};
};
/// 视频合并写缓存模板
/// 合并写缓存模板
/// \tparam packet 包类型
/// \tparam policy 刷新缓存策略
/// \tparam packet_list 包缓存类型
template<typename packet, typename policy = FlushPolicy, typename packet_list = List<std::shared_ptr<packet> > >
class VideoPacketCache {
class PacketCache {
public:
VideoPacketCache() : _policy(false) {
PacketCache(){
_cache = std::make_shared<packet_list>();
}
virtual ~VideoPacketCache() = default;
virtual ~PacketCache() = default;
void inputVideo(const std::shared_ptr<packet> &rtp, bool key_pos) {
if (_policy.isFlushAble(_policy.getStamp(rtp), _cache->size())) {
void inputPacket(bool is_video, const std::shared_ptr<packet> &pkt, bool key_pos) {
if (_policy.isFlushAble(is_video, key_pos, _policy.getStamp(pkt), _cache->size())) {
flushAll();
}
//追加数据到最后
_cache->emplace_back(rtp);
_cache->emplace_back(pkt);
if (key_pos) {
_key_pos = key_pos;
}
}
virtual void onFlushVideo(std::shared_ptr<packet_list> &, bool key_pos) = 0;
virtual void onFlush(std::shared_ptr<packet_list> &, bool key_pos) = 0;
private:
void flushAll() {
if (_cache->empty()) {
return;
}
onFlushVideo(_cache, _key_pos);
onFlush(_cache, _key_pos);
_cache = std::make_shared<packet_list>();
_key_pos = false;
}
@ -222,44 +224,5 @@ private:
bool _key_pos = false;
};
/// 音频频合并写缓存模板
/// \tparam packet 包类型
/// \tparam policy 刷新缓存策略
/// \tparam packet_list 包缓存类型
template<typename packet, typename policy = FlushPolicy, typename packet_list = List<std::shared_ptr<packet> > >
class AudioPacketCache {
public:
AudioPacketCache() : _policy(true) {
_cache = std::make_shared<packet_list>();
}
virtual ~AudioPacketCache() = default;
void inputAudio(const std::shared_ptr<packet> &rtp) {
if (_policy.isFlushAble(_policy.getStamp(rtp), _cache->size())) {
flushAll();
}
//追加数据到最后
_cache->emplace_back(rtp);
}
virtual void onFlushAudio(std::shared_ptr<packet_list> &) = 0;
private:
void flushAll() {
if (_cache->empty()) {
return;
}
onFlushAudio(_cache);
_cache = std::make_shared<packet_list>();
}
private:
policy _policy;
std::shared_ptr<packet_list> _cache;
};
} /* namespace mediakit */
#endif //ZLMEDIAKIT_MEDIASOURCE_H
#endif //ZLMEDIAKIT_MEDIASOURCE_H

View File

@ -298,8 +298,69 @@ void MultiMediaSourceMuxer::resetTracks() {
_muxer->resetTracks();
}
//该类实现frame级别的时间戳覆盖
class FrameModifyStamp : public Frame{
public:
typedef std::shared_ptr<FrameModifyStamp> Ptr;
FrameModifyStamp(const Frame::Ptr &frame, Stamp &stamp){
_frame = frame;
//覆盖时间戳
stamp.revise(frame->dts(), frame->pts(), _dts, _pts, true);
}
~FrameModifyStamp() override {}
uint32_t dts() const override{
return _dts;
}
uint32_t pts() const override{
return _pts;
}
uint32_t prefixSize() const override {
return _frame->prefixSize();
}
bool keyFrame() const override {
return _frame->keyFrame();
}
bool configFrame() const override {
return _frame->configFrame();
}
bool cacheAble() const override {
return _frame->cacheAble();
}
char *data() const override {
return _frame->data();
}
uint32_t size() const override {
return _frame->size();
}
CodecId getCodecId() const override {
return _frame->getCodecId();
}
private:
Frame::Ptr _frame;
int64_t _dts;
int64_t _pts;
};
void MultiMediaSourceMuxer::inputFrame(const Frame::Ptr &frame) {
_muxer->inputFrame(frame);
GET_CONFIG(bool,modify_stamp,General::kModifyStamp);
if(!modify_stamp){
//未开启时间戳覆盖
_muxer->inputFrame(frame);
}else{
//开启了时间戳覆盖
FrameModifyStamp::Ptr new_frame = std::make_shared<FrameModifyStamp>(frame,_stamp[frame->getTrackType()]);
//输入时间戳覆盖后的帧
_muxer->inputFrame(new_frame);
}
}
bool MultiMediaSourceMuxer::isEnabled(){

View File

@ -178,6 +178,7 @@ public:
private:
MultiMuxerPrivate::Ptr _muxer;
std::weak_ptr<MediaSourceEvent> _listener;
Stamp _stamp[2];
};
}//namespace mediakit

View File

@ -67,6 +67,7 @@ const string kPublishToRtxp = GENERAL_FIELD"publishToRtxp";
const string kPublishToHls = GENERAL_FIELD"publishToHls";
const string kPublishToMP4 = GENERAL_FIELD"publishToMP4";
const string kMergeWriteMS = GENERAL_FIELD"mergeWriteMS";
const string kModifyStamp = GENERAL_FIELD"modifyStamp";
onceToken token([](){
mINI::Instance()[kFlowThreshold] = 1024;
@ -79,6 +80,7 @@ onceToken token([](){
mINI::Instance()[kPublishToHls] = 1;
mINI::Instance()[kPublishToMP4] = 0;
mINI::Instance()[kMergeWriteMS] = 0;
mINI::Instance()[kModifyStamp] = 0;
},nullptr);
}//namespace General
@ -293,3 +295,10 @@ const string kBenchmarkMode = "benchmark_mode";
} // namespace mediakit
void Assert_Throw(int failed, const char *exp, const char *func, const char *file, int line){
if(failed) {
_StrPrinter printer;
printer << "Assertion failed: (" << exp << "), function " << func << ", file " << file << ", line " << line << ".";
throw std::runtime_error(printer);
}
}

View File

@ -174,6 +174,8 @@ extern const string kPublishToMP4 ;
//合并写缓存大小(单位毫秒)合并写指服务器缓存一定的数据后才会一次性写入socket这样能提高性能但是会提高延时
//开启后会同时关闭TCP_NODELAY并开启MSG_MORE
extern const string kMergeWriteMS ;
//全局的时间戳覆盖开关在转协议时对frame进行时间戳覆盖
extern const string kModifyStamp;
}//namespace General
@ -217,6 +219,7 @@ extern const string kDirectProxy;
////////////RTMP服务器配置///////////
namespace Rtmp {
//rtmp推流时间戳覆盖开关
extern const string kModifyStamp;
//握手超时时间默认15秒
extern const string kHandshakeSecond;

View File

@ -9,65 +9,63 @@
*/
#include "AAC.h"
#ifdef ENABLE_MP4
#include "mpeg4-aac.h"
#endif
namespace mediakit{
void writeAdtsHeader(const AACFrame &hed, uint8_t *pcAdts) {
pcAdts[0] = (hed.syncword >> 4 & 0xFF); //8bit
pcAdts[1] = (hed.syncword << 4 & 0xF0); //4 bit
pcAdts[1] |= (hed.id << 3 & 0x08); //1 bit
pcAdts[1] |= (hed.layer << 1 & 0x06); //2bit
pcAdts[1] |= (hed.protection_absent & 0x01); //1 bit
unsigned const samplingFrequencyTable[16] = { 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350, 0, 0, 0 };
pcAdts[2] = (hed.profile << 6 & 0xC0); // 2 bit
pcAdts[2] |= (hed.sf_index << 2 & 0x3C); //4bit
pcAdts[2] |= (hed.private_bit << 1 & 0x02); //1 bit
pcAdts[2] |= (hed.channel_configuration >> 2 & 0x03); //1 bit
class AdtsHeader{
public:
unsigned int syncword = 0; //12 bslbf 同步字The bit string 1111 1111 1111说明一个ADTS帧的开始
unsigned int id; //1 bslbf MPEG 标示符, 设置为1
unsigned int layer; //2 uimsbf Indicates which layer is used. Set to 00
unsigned int protection_absent; //1 bslbf 表示是否误码校验
unsigned int profile; //2 uimsbf 表示使用哪个级别的AAC如01 Low Complexity(LC)--- AACLC
unsigned int sf_index; //4 uimsbf 表示使用的采样率下标
unsigned int private_bit; //1 bslbf
unsigned int channel_configuration; //3 uimsbf 表示声道数
unsigned int original; //1 bslbf
unsigned int home; //1 bslbf
//下面的为改变的参数即每一帧都不同
unsigned int copyright_identification_bit; //1 bslbf
unsigned int copyright_identification_start; //1 bslbf
unsigned int aac_frame_length; // 13 bslbf 一个ADTS帧的长度包括ADTS头和raw data block
unsigned int adts_buffer_fullness; //11 bslbf 0x7FF 说明是码率可变的码流
//no_raw_data_blocks_in_frame 表示ADTS帧中有number_of_raw_data_blocks_in_frame + 1个AAC原始帧.
//所以说number_of_raw_data_blocks_in_frame == 0
//表示说ADTS帧中有一个AAC数据块并不是说没有。(一个AAC原始帧包含一段时间内1024个采样及相关数据)
unsigned int no_raw_data_blocks_in_frame; //2 uimsfb
};
pcAdts[3] = (hed.channel_configuration << 6 & 0xC0); // 2 bit
pcAdts[3] |= (hed.original << 5 & 0x20); //1 bit
pcAdts[3] |= (hed.home << 4 & 0x10); //1 bit
pcAdts[3] |= (hed.copyright_identification_bit << 3 & 0x08); //1 bit
pcAdts[3] |= (hed.copyright_identification_start << 2 & 0x04); //1 bit
pcAdts[3] |= (hed.aac_frame_length >> 11 & 0x03); //2 bit
pcAdts[4] = (hed.aac_frame_length >> 3 & 0xFF); //8 bit
pcAdts[5] = (hed.aac_frame_length << 5 & 0xE0); //3 bit
pcAdts[5] |= (hed.adts_buffer_fullness >> 6 & 0x1F); //5 bit
pcAdts[6] = (hed.adts_buffer_fullness << 2 & 0xFC); //6 bit
pcAdts[6] |= (hed.no_raw_data_blocks_in_frame & 0x03); //2 bit
static void dumpAdtsHeader(const AdtsHeader &hed, uint8_t *out) {
out[0] = (hed.syncword >> 4 & 0xFF); //8bit
out[1] = (hed.syncword << 4 & 0xF0); //4 bit
out[1] |= (hed.id << 3 & 0x08); //1 bit
out[1] |= (hed.layer << 1 & 0x06); //2bit
out[1] |= (hed.protection_absent & 0x01); //1 bit
out[2] = (hed.profile << 6 & 0xC0); // 2 bit
out[2] |= (hed.sf_index << 2 & 0x3C); //4bit
out[2] |= (hed.private_bit << 1 & 0x02); //1 bit
out[2] |= (hed.channel_configuration >> 2 & 0x03); //1 bit
out[3] = (hed.channel_configuration << 6 & 0xC0); // 2 bit
out[3] |= (hed.original << 5 & 0x20); //1 bit
out[3] |= (hed.home << 4 & 0x10); //1 bit
out[3] |= (hed.copyright_identification_bit << 3 & 0x08); //1 bit
out[3] |= (hed.copyright_identification_start << 2 & 0x04); //1 bit
out[3] |= (hed.aac_frame_length >> 11 & 0x03); //2 bit
out[4] = (hed.aac_frame_length >> 3 & 0xFF); //8 bit
out[5] = (hed.aac_frame_length << 5 & 0xE0); //3 bit
out[5] |= (hed.adts_buffer_fullness >> 6 & 0x1F); //5 bit
out[6] = (hed.adts_buffer_fullness << 2 & 0xFC); //6 bit
out[6] |= (hed.no_raw_data_blocks_in_frame & 0x03); //2 bit
}
string makeAdtsConfig(const uint8_t *pcAdts){
if (!(pcAdts[0] == 0xFF && (pcAdts[1] & 0xF0) == 0xF0)) {
return "";
}
// Get and check the 'profile':
unsigned char profile = (pcAdts[2] & 0xC0) >> 6; // 2 bits
if (profile == 3) {
return "";
}
// Get and check the 'sampling_frequency_index':
unsigned char sampling_frequency_index = (pcAdts[2] & 0x3C) >> 2; // 4 bits
if (samplingFrequencyTable[sampling_frequency_index] == 0) {
return "";
}
// Get and check the 'channel_configuration':
unsigned char channel_configuration = ((pcAdts[2] & 0x01) << 2)
| ((pcAdts[3] & 0xC0) >> 6); // 3 bits
unsigned char audioSpecificConfig[2];
unsigned char const audioObjectType = profile + 1;
audioSpecificConfig[0] = (audioObjectType << 3) | (sampling_frequency_index >> 1);
audioSpecificConfig[1] = (sampling_frequency_index << 7) | (channel_configuration << 3);
return string((char *)audioSpecificConfig,2);
}
void makeAdtsHeader(const string &strAudioCfg,AACFrame &adts) {
uint8_t cfg1 = strAudioCfg[0];
uint8_t cfg2 = strAudioCfg[1];
static void parseAacConfig(const string &config, AdtsHeader &adts) {
uint8_t cfg1 = config[0];
uint8_t cfg2 = config[1];
int audioObjectType;
int sampling_frequency_index;
@ -93,9 +91,83 @@ void makeAdtsHeader(const string &strAudioCfg,AACFrame &adts) {
adts.adts_buffer_fullness = 2047;
adts.no_raw_data_blocks_in_frame = 0;
}
void getAACInfo(const AACFrame &adts,int &iSampleRate,int &iChannel){
iSampleRate = samplingFrequencyTable[adts.sf_index];
iChannel = adts.channel_configuration;
string makeAacConfig(const uint8_t *hex, int length){
#ifndef ENABLE_MP4
if (!(hex[0] == 0xFF && (hex[1] & 0xF0) == 0xF0)) {
return "";
}
// Get and check the 'profile':
unsigned char profile = (hex[2] & 0xC0) >> 6; // 2 bits
if (profile == 3) {
return "";
}
// Get and check the 'sampling_frequency_index':
unsigned char sampling_frequency_index = (hex[2] & 0x3C) >> 2; // 4 bits
if (samplingFrequencyTable[sampling_frequency_index] == 0) {
return "";
}
// Get and check the 'channel_configuration':
unsigned char channel_configuration = ((hex[2] & 0x01) << 2) | ((hex[3] & 0xC0) >> 6); // 3 bits
unsigned char audioSpecificConfig[2];
unsigned char const audioObjectType = profile + 1;
audioSpecificConfig[0] = (audioObjectType << 3) | (sampling_frequency_index >> 1);
audioSpecificConfig[1] = (sampling_frequency_index << 7) | (channel_configuration << 3);
return string((char *)audioSpecificConfig,2);
#else
struct mpeg4_aac_t aac = {0};
if (mpeg4_aac_adts_load(hex, length, &aac) > 0) {
char buf[32] = {0};
int len = mpeg4_aac_audio_specific_config_save(&aac, (uint8_t *) buf, sizeof(buf));
if (len > 0) {
return string(buf, len);
}
}
WarnL << "生成aac config失败, adts header:" << hexdump(hex, length);
return "";
#endif
}
int dumpAacConfig(const string &config, int length, uint8_t *out, int out_size) {
#ifndef ENABLE_MP4
AdtsHeader header;
parseAacConfig(config, header);
header.aac_frame_length = length;
dumpAdtsHeader(header, out);
return ADTS_HEADER_LEN;
#else
struct mpeg4_aac_t aac = {0};
int ret = mpeg4_aac_audio_specific_config_load((uint8_t *) config.data(), config.size(), &aac);
if (ret > 0) {
ret = mpeg4_aac_adts_save(&aac, length, out, out_size);
}
if (ret < 0) {
WarnL << "生成adts头失败:" << ret << ", aac config:" << hexdump(config.data(), config.size());
}
return ret;
#endif
}
bool parseAacConfig(const string &config, int &samplerate, int &channels){
#ifndef ENABLE_MP4
AdtsHeader header;
parseAacConfig(config, header);
samplerate = samplingFrequencyTable[header.sf_index];
channels = header.channel_configuration;
return true;
#else
struct mpeg4_aac_t aac = {0};
int ret = mpeg4_aac_audio_specific_config_load((uint8_t *) config.data(), config.size(), &aac);
if (ret > 0) {
samplerate = aac.sampling_frequency;
channels = aac.channels;
return true;
}
WarnL << "获取aac采样率、声道数失败:" << hexdump(config.data(), config.size());
return false;
#endif
}
Sdp::Ptr AACTrack::getSdp() {
@ -103,9 +175,7 @@ Sdp::Ptr AACTrack::getSdp() {
WarnL << getCodecName() << " Track未准备好";
return nullptr;
}
return std::make_shared<AACSdp>(getAacCfg(),getAudioSampleRate());
return std::make_shared<AACSdp>(getAacCfg(),getAudioSampleRate(), getAudioChannel());
}
}//namespace mediakit
}//namespace mediakit

View File

@ -13,98 +13,34 @@
#include "Frame.h"
#include "Track.h"
#define ADTS_HEADER_LEN 7
namespace mediakit{
class AACFrame;
unsigned const samplingFrequencyTable[16] = { 96000, 88200,
64000, 48000,
44100, 32000,
24000, 22050,
16000, 12000,
11025, 8000,
7350, 0, 0, 0 };
void makeAdtsHeader(const string &strAudioCfg,AACFrame &adts);
void writeAdtsHeader(const AACFrame &adts, uint8_t *pcAdts) ;
string makeAdtsConfig(const uint8_t *pcAdts);
void getAACInfo(const AACFrame &adts,int &iSampleRate,int &iChannel);
string makeAacConfig(const uint8_t *hex, int length);
int dumpAacConfig(const string &config, int length, uint8_t *out, int out_size);
bool parseAacConfig(const string &config, int &samplerate, int &channels);
/**
* aac帧adts头
*/
class AACFrame : public Frame {
class AACFrame : public FrameImp {
public:
typedef std::shared_ptr<AACFrame> Ptr;
AACFrame(){
_codecid = CodecAAC;
}
};
char *data() const override{
return (char *)buffer;
}
uint32_t size() const override {
return aac_frame_length;
}
uint32_t dts() const override {
return timeStamp;
}
uint32_t prefixSize() const override{
return iPrefixSize;
}
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{
return CodecAAC;
}
bool keyFrame() const override {
return false;
}
bool configFrame() const override{
return false;
}
public:
unsigned int syncword = 0; //12 bslbf 同步字The bit string 1111 1111 1111说明一个ADTS帧的开始
unsigned int id; //1 bslbf MPEG 标示符, 设置为1
unsigned int layer; //2 uimsbf Indicates which layer is used. Set to 00
unsigned int protection_absent; //1 bslbf 表示是否误码校验
unsigned int profile; //2 uimsbf 表示使用哪个级别的AAC如01 Low Complexity(LC)--- AACLC
unsigned int sf_index; //4 uimsbf 表示使用的采样率下标
unsigned int private_bit; //1 bslbf
unsigned int channel_configuration; //3 uimsbf 表示声道数
unsigned int original; //1 bslbf
unsigned int home; //1 bslbf
//下面的为改变的参数即每一帧都不同
unsigned int copyright_identification_bit; //1 bslbf
unsigned int copyright_identification_start; //1 bslbf
unsigned int aac_frame_length; // 13 bslbf 一个ADTS帧的长度包括ADTS头和raw data block
unsigned int adts_buffer_fullness; //11 bslbf 0x7FF 说明是码率可变的码流
//no_raw_data_blocks_in_frame 表示ADTS帧中有number_of_raw_data_blocks_in_frame + 1个AAC原始帧.
//所以说number_of_raw_data_blocks_in_frame == 0
//表示说ADTS帧中有一个AAC数据块并不是说没有。(一个AAC原始帧包含一段时间内1024个采样及相关数据)
unsigned int no_raw_data_blocks_in_frame; //2 uimsfb
unsigned char buffer[2 * 1024 + 7];
uint32_t timeStamp;
uint32_t iPrefixSize = 7;
} ;
class AACFrameNoCacheAble : public FrameNoCacheAble {
class AACFrameNoCacheAble : public FrameFromPtr {
public:
typedef std::shared_ptr<AACFrameNoCacheAble> Ptr;
AACFrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts,uint32_t pts = 0,int prefixeSize = 7){
AACFrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts,uint32_t pts = 0,int prefix_size = ADTS_HEADER_LEN){
_ptr = ptr;
_size = size;
_dts = dts;
_prefixSize = prefixeSize;
}
TrackType getTrackType() const override{
return TrackAudio;
_prefix_size = prefix_size;
}
CodecId getCodecId() const override{
@ -118,8 +54,7 @@ public:
bool configFrame() const override{
return false;
}
} ;
};
/**
* aac音频通道
@ -136,44 +71,25 @@ public:
/**
* aac类型的媒体
* @param aac_cfg aac两个字节的配置信息
* @param aac_cfg aac配置信息
*/
AACTrack(const string &aac_cfg){
if(aac_cfg.size() < 2){
setAacCfg(aac_cfg);
}
/**
* aac
*/
void setAacCfg(const string &aac_cfg){
if (aac_cfg.size() < 2) {
throw std::invalid_argument("adts配置必须最少2个字节");
}
_cfg = aac_cfg.substr(0,2);
_cfg = aac_cfg;
onReady();
}
/**
* aac类型的媒体
* @param adts_header adts头7
* @param adts_header_len adts头长度7
*/
AACTrack(const char *adts_header,int adts_header_len = 7){
if(adts_header_len < 7){
throw std::invalid_argument("adts头必须不少于7个字节");
}
_cfg = makeAdtsConfig((uint8_t*)adts_header);
onReady();
}
/**
* aac类型的媒体
* @param aac_frame_with_adts adts头的aac帧
*/
AACTrack(const Frame::Ptr &aac_frame_with_adts){
if(aac_frame_with_adts->getCodecId() != CodecAAC || aac_frame_with_adts->prefixSize() < 7){
throw std::invalid_argument("必须输入带adts头的aac帧");
}
_cfg = makeAdtsConfig((uint8_t*)aac_frame_with_adts->data());
onReady();
}
/**
* aac两个字节的配置
* @return
* aac
*/
const string &getAacCfg() const{
return _cfg;
@ -181,7 +97,6 @@ public:
/**
*
* @return
*/
CodecId getCodecId() const override{
return CodecAAC;
@ -189,45 +104,42 @@ public:
/**
* aac_cfg前是无效的Track
* @return
*/
bool ready() override {
return !_cfg.empty();
}
/**
*
* @return
*/
*
*/
int getAudioSampleRate() const override{
return _sampleRate;
}
/**
* 168
* @return
*/
int getAudioSampleBit() const override{
return _sampleBit;
}
/**
*
* @return
*/
int getAudioChannel() const override{
return _channel;
}
/**
* ,aac_cfg
* @param frame
*/
* ,aac_cfg
* @param frame
*/
void inputFrame(const Frame::Ptr &frame) override{
if (_cfg.empty()) {
//未获取到aac_cfg信息
if (frame->prefixSize() >= 7) {
if (frame->prefixSize()) {
//7个字节的adts头
_cfg = makeAdtsConfig((uint8_t *)(frame->data()));
_cfg = makeAacConfig((uint8_t *) (frame->data()), frame->prefixSize());
onReady();
} else {
WarnL << "无法获取adts头!";
@ -240,13 +152,12 @@ private:
* 2aac配置
*/
void onReady(){
if(_cfg.size() < 2){
if (_cfg.size() < 2) {
return;
}
AACFrame aacFrame;
makeAdtsHeader(_cfg,aacFrame);
getAACInfo(aacFrame,_sampleRate,_channel);
parseAacConfig(_cfg, _sampleRate, _channel);
}
Track::Ptr clone() override {
return std::make_shared<std::remove_reference<decltype(*this)>::type >(*this);
}
@ -260,43 +171,42 @@ private:
int _channel = 0;
};
/**
* aac类型SDP
*/
* aac类型SDP
*/
class AACSdp : public Sdp {
public:
/**
*
*
* @param aac_cfg aac两个字节的配置描述
* @param sample_rate
* @param playload_type rtp playload type 98
* @param payload_type rtp payload type 98
* @param bitrate
*/
AACSdp(const string &aac_cfg,
int sample_rate,
int playload_type = 98,
int bitrate = 128) : Sdp(sample_rate,playload_type){
_printer << "m=audio 0 RTP/AVP " << playload_type << "\r\n";
int channels,
int payload_type = 98,
int bitrate = 128) : Sdp(sample_rate,payload_type){
_printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n";
_printer << "b=AS:" << bitrate << "\r\n";
_printer << "a=rtpmap:" << playload_type << " MPEG4-GENERIC/" << sample_rate << "\r\n";
_printer << "a=rtpmap:" << payload_type << " MPEG4-GENERIC/" << sample_rate << "/" << channels << "\r\n";
char configStr[32] = {0};
snprintf(configStr, sizeof(configStr), "%02X%02X", (uint8_t)aac_cfg[0], (uint8_t)aac_cfg[1]);
_printer << "a=fmtp:" << playload_type << " streamtype=5;profile-level-id=1;mode=AAC-hbr;"
<< "sizelength=13;indexlength=3;indexdeltalength=3;config="
<< configStr << "\r\n";
_printer << "a=control:trackID=" << getTrackType() << "\r\n";
string configStr;
char buf[4] = {0};
for(auto &ch : aac_cfg){
snprintf(buf, sizeof(buf), "%02X", (uint8_t)ch);
configStr.append(buf);
}
_printer << "a=fmtp:" << payload_type << " streamtype=5;profile-level-id=1;mode=AAC-hbr;"
<< "sizelength=13;indexlength=3;indexdeltalength=3;config=" << configStr << "\r\n";
_printer << "a=control:trackID=" << (int)TrackAudio << "\r\n";
}
string getSdp() const override {
return _printer;
}
TrackType getTrackType() const override {
return TrackAudio;
}
CodecId getCodecId() const override {
return CodecAAC;
}
@ -305,6 +215,4 @@ private:
};
}//namespace mediakit
#endif //ZLMEDIAKIT_AAC_H
#endif //ZLMEDIAKIT_AAC_H

View File

@ -13,18 +13,6 @@
namespace mediakit{
AACRtmpDecoder::AACRtmpDecoder() {
_adts = obtainFrame();
}
AACFrame::Ptr AACRtmpDecoder::obtainFrame() {
//从缓存池重新申请对象,防止覆盖已经写入环形缓存的对象
auto frame = ResourcePoolHelper<AACFrame>::obtainObj();
frame->aac_frame_length = 7;
frame->iPrefixSize = 7;
return frame;
}
static string getAacCfg(const RtmpPacket &thiz) {
string ret;
if (thiz.getMediaType() != FLV_CODEC_AAC) {
@ -37,11 +25,11 @@ static string getAacCfg(const RtmpPacket &thiz) {
WarnL << "bad aac cfg!";
return ret;
}
ret = thiz.strBuf.substr(2, 2);
ret = thiz.strBuf.substr(2);
return ret;
}
bool AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool key_pos) {
bool AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool) {
if (pkt->isCfgFrame()) {
_aac_cfg = getAacCfg(*pkt);
return false;
@ -52,26 +40,28 @@ bool AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool key_pos) {
return false;
}
void AACRtmpDecoder::onGetAAC(const char* pcData, int iLen, uint32_t ui32TimeStamp) {
if(iLen + 7 > sizeof(_adts->buffer)){
WarnL << "Illegal adts data, exceeding the length limit.";
return;
void AACRtmpDecoder::onGetAAC(const char* data, int len, uint32_t stamp) {
auto frame = ResourcePoolHelper<AACFrame>::obtainObj();
//生成adts头
char adts_header[32] = {0};
auto size = dumpAacConfig(_aac_cfg, len, (uint8_t *) adts_header, sizeof(adts_header));
if (size > 0) {
frame->_buffer.assign(adts_header, size);
frame->_prefix_size = size;
} else {
frame->_buffer.clear();
frame->_prefix_size = 0;
}
//写adts结构头
makeAdtsHeader(_aac_cfg,*_adts);
//拷贝aac负载
memcpy(_adts->buffer + 7, pcData, iLen);
_adts->aac_frame_length = 7 + iLen;
_adts->timeStamp = ui32TimeStamp;
//adts结构头转成头7个字节
writeAdtsHeader(*_adts, _adts->buffer);
//追加负载数据
frame->_buffer.append(data, len);
frame->_dts = stamp;
//写入环形缓存
RtmpCodec::inputFrame(_adts);
_adts = obtainFrame();
RtmpCodec::inputFrame(frame);
}
/////////////////////////////////////////////////////////////////////////////////////
AACRtmpEncoder::AACRtmpEncoder(const Track::Ptr &track) {
@ -91,9 +81,9 @@ void AACRtmpEncoder::makeConfigPacket() {
void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
if (_aac_cfg.empty()) {
if (frame->prefixSize() >= 7) {
if (frame->prefixSize()) {
//包含adts头,从adts头获取aac配置信息
_aac_cfg = makeAdtsConfig((uint8_t *)(frame->data()));
_aac_cfg = makeAacConfig((uint8_t *) (frame->data()), frame->prefixSize());
}
makeConfigPacket();
}

View File

@ -23,7 +23,7 @@ class AACRtmpDecoder : public RtmpCodec , public ResourcePoolHelper<AACFrame> {
public:
typedef std::shared_ptr<AACRtmpDecoder> Ptr;
AACRtmpDecoder();
AACRtmpDecoder() {}
~AACRtmpDecoder() {}
/**
@ -33,19 +33,14 @@ public:
*/
bool inputRtmp(const RtmpPacket::Ptr &Rtmp, bool key_pos = false) override;
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{
return CodecAAC;
}
protected:
void onGetAAC(const char* pcData, int iLen, uint32_t ui32TimeStamp);
AACFrame::Ptr obtainFrame();
protected:
AACFrame::Ptr _adts;
private:
void onGetAAC(const char *data, int len, uint32_t stamp);
private:
string _aac_cfg;
};
@ -76,11 +71,14 @@ public:
* config包
*/
void makeConfigPacket() override;
private:
void makeAudioConfigPkt();
private:
uint8_t _audio_flv_flags;
AACTrack::Ptr _track;
string _aac_cfg;
};
}//namespace mediakit

View File

@ -9,19 +9,19 @@
*/
#include "AACRtp.h"
#define ADTS_HEADER_LEN 7
#define AAC_MAX_FRAME_SIZE (2 * 1024)
namespace mediakit{
AACRtpEncoder::AACRtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize,
uint32_t ui32SampleRate,
uint8_t ui8PlayloadType,
uint8_t ui8PayloadType,
uint8_t ui8Interleaved) :
RtpInfo(ui32Ssrc,
ui32MtuSize,
ui32SampleRate,
ui8PlayloadType,
ui8PayloadType,
ui8Interleaved){
}
@ -56,32 +56,30 @@ void AACRtpEncoder::inputFrame(const Frame::Ptr &frame) {
}
void AACRtpEncoder::makeAACRtp(const void *data, unsigned int len, bool mark, uint32_t uiStamp) {
RtpCodec::inputRtp(makeRtp(getTrackType(),data,len,mark,uiStamp), false);
RtpCodec::inputRtp(makeRtp(getTrackType(), data, len, mark, uiStamp), false);
}
/////////////////////////////////////////////////////////////////////////////////////
AACRtpDecoder::AACRtpDecoder(const Track::Ptr &track){
AACRtpDecoder::AACRtpDecoder(const Track::Ptr &track) {
auto aacTrack = dynamic_pointer_cast<AACTrack>(track);
if(!aacTrack || !aacTrack->ready()){
if (!aacTrack || !aacTrack->ready()) {
WarnL << "该aac track无效!";
}else{
} else {
_aac_cfg = aacTrack->getAacCfg();
}
_adts = obtainFrame();
_frame = obtainFrame();
}
AACRtpDecoder::AACRtpDecoder() {
_adts = obtainFrame();
_frame = obtainFrame();
}
AACFrame::Ptr AACRtpDecoder::obtainFrame() {
//从缓存池重新申请对象,防止覆盖已经写入环形缓存的对象
auto frame = ResourcePoolHelper<AACFrame>::obtainObj();
frame->aac_frame_length = ADTS_HEADER_LEN;
frame->iPrefixSize = ADTS_HEADER_LEN;
if(frame->syncword == 0 && !_aac_cfg.empty()) {
makeAdtsHeader(_aac_cfg,*frame);
}
frame->_prefix_size = 0;
frame->_buffer.clear();
return frame;
}
@ -96,20 +94,18 @@ bool AACRtpDecoder::inputRtp(const RtpPacket::Ptr &rtppack, bool key_pos) {
//忽略Au-Header区
ptr += 2 + au_header_count * 2;
static const uint32_t max_size = sizeof(AACFrame::buffer) - ADTS_HEADER_LEN;
while (ptr < end) {
auto size = (uint32_t) (end - ptr);
if(size > max_size){
size = max_size;
if (size > AAC_MAX_FRAME_SIZE) {
size = AAC_MAX_FRAME_SIZE;
}
if (_adts->aac_frame_length + size > sizeof(AACFrame::buffer)) {
if (_frame->size() + size > AAC_MAX_FRAME_SIZE) {
//数据太多了,先清空
flushData();
}
//追加aac数据
memcpy(_adts->buffer + _adts->aac_frame_length, ptr, size);
_adts->aac_frame_length += size;
_adts->timeStamp = rtppack->timeStamp;
_frame->_buffer.append((char *) ptr, size);
_frame->_dts = rtppack->timeStamp;
ptr += size;
}
@ -120,15 +116,22 @@ bool AACRtpDecoder::inputRtp(const RtpPacket::Ptr &rtppack, bool key_pos) {
return false;
}
void AACRtpDecoder::flushData() {
if(_adts->aac_frame_length == ADTS_HEADER_LEN){
if (_frame->_buffer.empty()) {
//没有有效数据
return;
}
writeAdtsHeader(*_adts, _adts->buffer);
RtpCodec::inputFrame(_adts);
_adts = obtainFrame();
//插入adts头
char adts_header[32] = {0};
auto size = dumpAacConfig(_aac_cfg, _frame->_buffer.size(), (uint8_t *) adts_header, sizeof(adts_header));
if (size > 0) {
//插入adts头
_frame->_buffer.insert(0, adts_header, size);
_frame->_prefix_size = size;
}
RtpCodec::inputFrame(_frame);
_frame = obtainFrame();
}

View File

@ -31,19 +31,19 @@ public:
*/
bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = false) override;
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{
CodecId getCodecId() const override {
return CodecAAC;
}
protected:
AACRtpDecoder();
private:
AACFrame::Ptr obtainFrame();
void flushData();
private:
AACFrame::Ptr _adts;
AACFrame::Ptr _frame;
string _aac_cfg;
};
@ -59,13 +59,13 @@ public:
* @param ui32Ssrc ssrc
* @param ui32MtuSize mtu
* @param ui32SampleRate
* @param ui8PlayloadType pt类型
* @param ui8PayloadType pt类型
* @param ui8Interleaved rtsp interleaved
*/
AACRtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize,
uint32_t ui32SampleRate,
uint8_t ui8PlayloadType = 97,
uint8_t ui8PayloadType = 97,
uint8_t ui8Interleaved = TrackAudio * 2);
~AACRtpEncoder() {}
@ -74,8 +74,10 @@ public:
* @param frame dats头的aac数据
*/
void inputFrame(const Frame::Ptr &frame) override;
private:
void makeAACRtp(const void *pData, unsigned int uiLen, bool bMark, uint32_t uiStamp);
private:
unsigned char _aucSectionBuf[1600];
};

View File

@ -33,17 +33,12 @@ Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) {
return nullptr;
}
string aac_cfg;
unsigned int cfg1;
sscanf(aac_cfg_str.substr(0, 2).data(), "%02X", &cfg1);
cfg1 &= 0x00FF;
aac_cfg.push_back(cfg1);
unsigned int cfg2;
sscanf(aac_cfg_str.substr(2, 2).data(), "%02X", &cfg2);
cfg2 &= 0x00FF;
aac_cfg.push_back(cfg2);
for(int i = 0 ; i < aac_cfg_str.size() / 2 ; ++i ){
unsigned int cfg;
sscanf(aac_cfg_str.substr(i * 2, 2).data(), "%02X", &cfg);
cfg &= 0x00FF;
aac_cfg.push_back((char)cfg);
}
return std::make_shared<AACTrack>(aac_cfg);
}
@ -115,7 +110,7 @@ RtpCodec::Ptr Factory::getRtpEncoderBySdp(const Sdp::Ptr &sdp) {
}
auto mtu = (sdp->getTrackType() == TrackVideo ? video_mtu : audio_mtu);
auto sample_rate = sdp->getSampleRate();
auto pt = sdp->getPlayloadType();
auto pt = sdp->getPayloadType();
auto interleaved = sdp->getTrackType() * 2;
auto codec_id = sdp->getCodecId();
switch (codec_id){
@ -221,13 +216,27 @@ Track::Ptr Factory::getAudioTrackByAmf(const AMFValue& amf, int sample_rate, int
return getTrackByCodecId(codecId, sample_rate, channels, sample_bit);
}
RtmpCodec::Ptr Factory::getRtmpCodecByTrack(const Track::Ptr &track) {
RtmpCodec::Ptr Factory::getRtmpCodecByTrack(const Track::Ptr &track, bool is_encode) {
switch (track->getCodecId()){
case CodecH264 : return std::make_shared<H264RtmpEncoder>(track);
case CodecAAC : return std::make_shared<AACRtmpEncoder>(track);
case CodecH265 : return std::make_shared<H265RtmpEncoder>(track);
case CodecG711A :
case CodecG711U : return std::make_shared<G711RtmpEncoder>(track);
case CodecG711A :
case CodecG711U : {
auto audio_track = dynamic_pointer_cast<AudioTrack>(track);
if (is_encode && (audio_track->getAudioSampleRate() != 8000 ||
audio_track->getAudioChannel() != 1 ||
audio_track->getAudioSampleBit() != 16)) {
//rtmp对g711只支持8000/1/16规格但是ZLMediaKit可以解析其他规格的G711
WarnL << "RTMP只支持8000/1/16规格的G711,目前规格是:"
<< audio_track->getAudioSampleRate() << "/"
<< audio_track->getAudioChannel() << "/"
<< audio_track->getAudioSampleBit()
<< ",该音频已被忽略";
return nullptr;
}
return std::make_shared<G711RtmpEncoder>(track);
}
default : WarnL << "暂不支持该CodecId:" << track->getCodecName(); return nullptr;
}
}

View File

@ -59,8 +59,9 @@ public:
/**
* Track获取Rtmp的编解码器
* @param track
* @param is_encode
*/
static RtmpCodec::Ptr getRtmpCodecByTrack(const Track::Ptr &track);
static RtmpCodec::Ptr getRtmpCodecByTrack(const Track::Ptr &track, bool is_encode);
/**
* codecId获取rtmp的codec描述

View File

@ -15,6 +15,59 @@ using namespace toolkit;
namespace mediakit{
/**
*
*/
class FrameCacheAble : public FrameFromPtr {
public:
typedef std::shared_ptr<FrameCacheAble> Ptr;
FrameCacheAble(const Frame::Ptr &frame){
if(frame->cacheAble()){
_frame = frame;
_ptr = frame->data();
}else{
_buffer = std::make_shared<BufferRaw>();
_buffer->assign(frame->data(),frame->size());
_ptr = _buffer->data();
}
_size = frame->size();
_dts = frame->dts();
_pts = frame->pts();
_prefix_size = frame->prefixSize();
_codecid = frame->getCodecId();
_key = frame->keyFrame();
_config = frame->configFrame();
}
virtual ~FrameCacheAble() = default;
/**
*
*/
bool cacheAble() const override {
return true;
}
CodecId getCodecId() const override{
return _codecid;
}
bool keyFrame() const override{
return _key;
}
bool configFrame() const override{
return _config;
}
private:
Frame::Ptr _frame;
BufferRaw::Ptr _buffer;
CodecId _codecid;
bool _key;
bool _config;
};
Frame::Ptr Frame::getCacheAbleFrame(const Frame::Ptr &frame){
if(frame->cacheAble()){
return frame;
@ -23,17 +76,35 @@ Frame::Ptr Frame::getCacheAbleFrame(const Frame::Ptr &frame){
}
#define SWITCH_CASE(codec_id) case codec_id : return #codec_id
const char *CodecInfo::getCodecName() {
switch (getCodecId()) {
const char *getCodecName(CodecId codecId) {
switch (codecId) {
SWITCH_CASE(CodecH264);
SWITCH_CASE(CodecH265);
SWITCH_CASE(CodecAAC);
SWITCH_CASE(CodecG711A);
SWITCH_CASE(CodecG711U);
default:
return "unknown codec";
SWITCH_CASE(CodecOpus);
default : return "unknown codec";
}
}
}//namespace mediakit
TrackType getTrackType(CodecId codecId){
switch (codecId){
case CodecH264:
case CodecH265: return TrackVideo;
case CodecAAC:
case CodecG711A:
case CodecG711U:
case CodecOpus: return TrackAudio;
default: return TrackInvalid;
}
}
const char *CodecInfo::getCodecName() {
return mediakit::getCodecName(getCodecId());
}
TrackType CodecInfo::getTrackType() {
return mediakit::getTrackType(getCodecId());
}
}//namespace mediakit

View File

@ -28,6 +28,7 @@ typedef enum {
CodecAAC,
CodecG711A,
CodecG711U,
CodecOpus,
CodecMax = 0x7FFF
} CodecId;
@ -39,6 +40,16 @@ typedef enum {
TrackMax = 3
} TrackType;
/**
*
*/
const char *getCodecName(CodecId codecId);
/**
*
*/
TrackType getTrackType(CodecId codecId);
/**
*
*/
@ -49,11 +60,6 @@ public:
CodecInfo(){}
virtual ~CodecInfo(){}
/**
*
*/
virtual TrackType getTrackType() const = 0;
/**
*
*/
@ -61,9 +67,13 @@ public:
/**
*
* @return
*/
const char *getCodecName();
/**
*
*/
TrackType getTrackType();
};
/**
@ -76,15 +86,11 @@ public:
/**
*
* @return
*/
virtual uint32_t dts() const = 0;
/**
*
* @return
*/
virtual uint32_t pts() const {
return dts();
@ -98,13 +104,11 @@ public:
/**
*
* @return
*/
virtual bool keyFrame() const = 0;
/**
* sps pps vps
* @return
*/
virtual bool configFrame() const = 0;
@ -115,14 +119,77 @@ public:
/**
* frame
* @return
*/
static Ptr getCacheAbleFrame(const Ptr &frame);
};
class FrameImp : public Frame {
public:
typedef std::shared_ptr<FrameImp> Ptr;
char *data() const override{
return (char *)_buffer.data();
}
uint32_t size() const override {
return _buffer.size();
}
uint32_t dts() const override {
return _dts;
}
uint32_t pts() const override{
return _pts ? _pts : _dts;
}
uint32_t prefixSize() const override{
return _prefix_size;
}
CodecId getCodecId() const override{
return _codecid;
}
bool keyFrame() const override {
return false;
}
bool configFrame() const override{
return false;
}
public:
CodecId _codecid = CodecInvalid;
string _buffer;
uint32_t _dts = 0;
uint32_t _pts = 0;
uint32_t _prefix_size = 0;
};
/**
* Frame类中可以有多个帧 0x 00 00 01
* ZLMediaKit会先把这种复合帧split成单个帧然后再处理
* Frame
*
*/
template<typename Parent>
class FrameInternal : public Parent{
public:
typedef std::shared_ptr<FrameInternal> Ptr;
FrameInternal(const Frame::Ptr &parent_frame, char *ptr, uint32_t size, int prefix_size)
: Parent(ptr, size, parent_frame->dts(), parent_frame->pts(), prefix_size) {
_parent_frame = parent_frame;
}
bool cacheAble() const override {
return _parent_frame->cacheAble();
}
private:
Frame::Ptr _parent_frame;
};
/**
*
* @tparam T
*/
template <typename T>
class ResourcePoolHelper{
@ -140,18 +207,17 @@ private:
};
/**
*
*
*/
class FrameWriterInterface {
public:
typedef std::shared_ptr<FrameWriterInterface> Ptr;
FrameWriterInterface(){}
virtual ~FrameWriterInterface(){}
/**
*
* @param frame
*/
*
*/
virtual void inputFrame(const Frame::Ptr &frame) = 0;
};
@ -165,16 +231,16 @@ public:
/**
* inputFrame后触发onWriteFrame回调
* @param cb
*/
FrameWriterInterfaceHelper(const onWriteFrame& cb){
_writeCallback = cb;
}
virtual ~FrameWriterInterfaceHelper(){}
/**
*
* @param frame
*/
*
*/
void inputFrame(const Frame::Ptr &frame) override {
_writeCallback(frame);
}
@ -182,7 +248,6 @@ private:
onWriteFrame _writeCallback;
};
/**
*
*/
@ -193,6 +258,9 @@ public:
FrameDispatcher(){}
virtual ~FrameDispatcher(){}
/**
*
*/
void addDelegate(const FrameWriterInterface::Ptr &delegate){
//_delegates_write可能多线程同时操作
lock_guard<mutex> lck(_mtx);
@ -200,7 +268,10 @@ public:
_need_update = true;
}
void delDelegate(void *ptr){
/**
*
*/
void delDelegate(FrameWriterInterface *ptr){
//_delegates_write可能多线程同时操作
lock_guard<mutex> lck(_mtx);
_delegates_write.erase(ptr);
@ -208,8 +279,7 @@ public:
}
/**
*
* @param frame
*
*/
void inputFrame(const Frame::Ptr &frame) override{
if(_need_update){
@ -223,7 +293,13 @@ public:
for(auto &pr : _delegates_read){
pr.second->inputFrame(frame);
}
}
/**
*
*/
int size() const {
return _delegates_write.size();
}
private:
mutex _mtx;
@ -250,105 +326,23 @@ public:
}
uint32_t pts() const override{
if(_pts){
return _pts;
}
return dts();
return _pts ? _pts : dts();
}
uint32_t prefixSize() const override{
return _prefixSize;
return _prefix_size;
}
bool cacheAble() const override {
return false;
}
protected:
char *_ptr;
uint32_t _size;
uint32_t _dts;
uint32_t _pts = 0;
uint32_t _prefixSize;
uint32_t _prefix_size;
};
/**
* DevChannel类中有用到
* 使
* ZLMediaKit是同步对帧数据进行使用和处理的
*
* Frame::getCacheAbleFrame方法拷贝一个可缓存的帧
*/
class FrameNoCacheAble : public FrameFromPtr{
public:
typedef std::shared_ptr<FrameNoCacheAble> Ptr;
/**
*
* @return
*/
bool cacheAble() const override {
return false;
}
};
/**
*
* @see FrameNoCacheAble
*/
class FrameCacheAble : public FrameFromPtr {
public:
typedef std::shared_ptr<FrameCacheAble> Ptr;
FrameCacheAble(const Frame::Ptr &frame){
if(frame->cacheAble()){
_frame = frame;
_ptr = frame->data();
}else{
_buffer = std::make_shared<BufferRaw>();
_buffer->assign(frame->data(),frame->size());
_ptr = _buffer->data();
}
_size = frame->size();
_dts = frame->dts();
_pts = frame->pts();
_prefixSize = frame->prefixSize();
_trackType = frame->getTrackType();
_codec = frame->getCodecId();
_key = frame->keyFrame();
_config = frame->configFrame();
}
virtual ~FrameCacheAble() = default;
/**
*
* @return
*/
bool cacheAble() const override {
return true;
}
TrackType getTrackType() const override{
return _trackType;
}
CodecId getCodecId() const override{
return _codec;
}
bool keyFrame() const override{
return _key;
}
bool configFrame() const override{
return _config;
}
private:
Frame::Ptr _frame;
BufferRaw::Ptr _buffer;
TrackType _trackType;
CodecId _codec;
bool _key;
bool _config;
};
}//namespace mediakit
#endif //ZLMEDIAKIT_FRAME_H
#endif //ZLMEDIAKIT_FRAME_H

View File

@ -19,76 +19,28 @@ namespace mediakit{
/**
* G711帧
*/
class G711Frame : public Frame {
class G711Frame : public FrameImp {
public:
typedef std::shared_ptr<G711Frame> Ptr;
char *data() const override{
return (char *)buffer.data();
G711Frame(){
_codecid = CodecG711A;
}
};
uint32_t size() const override {
return buffer.size();
}
uint32_t dts() const override {
return timeStamp;
}
uint32_t prefixSize() const override{
return 0;
}
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{
return _codecId;
}
bool keyFrame() const override {
return false;
}
bool configFrame() const override{
return false;
}
public:
CodecId _codecId = CodecG711A;
string buffer;
uint32_t timeStamp;
} ;
class G711FrameNoCacheAble : public FrameNoCacheAble {
class G711FrameNoCacheAble : public FrameFromPtr {
public:
typedef std::shared_ptr<G711FrameNoCacheAble> Ptr;
//兼容通用接口
G711FrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts, uint32_t pts = 0,int prefixeSize = 0){
G711FrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts, uint32_t pts = 0,int prefix_size = 0){
_ptr = ptr;
_size = size;
_dts = dts;
_prefixSize = prefixeSize;
_prefix_size = prefix_size;
}
//兼容通用接口
void setCodec(CodecId codecId){
_codecId = codecId;
}
G711FrameNoCacheAble(CodecId codecId, char *ptr,uint32_t size,uint32_t dts,int prefixeSize = 0){
_codecId = codecId;
_ptr = ptr;
_size = size;
_dts = dts;
_prefixSize = prefixeSize;
}
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{
return _codecId;
}
@ -108,67 +60,18 @@ private:
/**
* G711音频通道
*/
class G711Track : public AudioTrack{
class G711Track : public AudioTrackImp{
public:
typedef std::shared_ptr<G711Track> Ptr;
/**
* G711A G711U
*/
G711Track(CodecId codecId,int sample_rate, int channels, int sample_bit){
_codecid = codecId;
_sample_rate = sample_rate;
_channels = channels;
_sample_bit = sample_bit;
}
/**
*
*/
CodecId getCodecId() const override{
return _codecid;
}
/**
*
*/
bool ready() override {
return true;
}
/**
*
*/
int getAudioSampleRate() const override{
return _sample_rate;
}
/**
* 168
*/
int getAudioSampleBit() const override{
return _sample_bit;
}
/**
*
*/
int getAudioChannel() const override{
return _channels;
}
G711Track(CodecId codecId,int sample_rate, int channels, int sample_bit) : AudioTrackImp(codecId,sample_rate,channels,sample_bit){}
private:
//克隆该Track
Track::Ptr clone() override {
return std::make_shared<std::remove_reference<decltype(*this)>::type >(*this);
}
//生成sdp
Sdp::Ptr getSdp() override ;
private:
CodecId _codecid;
int _sample_rate;
int _channels;
int _sample_bit;
};
/**
@ -180,37 +83,30 @@ public:
* G711采样率固定为8000
* @param codecId G711A G711U
* @param sample_rate
* @param playload_type rtp playload
* @param payload_type rtp payload
* @param bitrate
*/
G711Sdp(CodecId codecId,
int sample_rate,
int channels,
int playload_type = 98,
int bitrate = 128) : Sdp(sample_rate,playload_type), _codecId(codecId){
_printer << "m=audio 0 RTP/AVP " << playload_type << "\r\n";
_printer << "a=rtpmap:" << playload_type << (codecId == CodecG711A ? " PCMA/" : " PCMU/") << sample_rate << "/" << channels << "\r\n";
_printer << "a=control:trackID=" << getTrackType() << "\r\n";
int payload_type = 98,
int bitrate = 128) : Sdp(sample_rate,payload_type), _codecId(codecId){
_printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n";
_printer << "a=rtpmap:" << payload_type << (codecId == CodecG711A ? " PCMA/" : " PCMU/") << sample_rate << "/" << channels << "\r\n";
_printer << "a=control:trackID=" << (int)TrackAudio << "\r\n";
}
string getSdp() const override {
return _printer;
}
TrackType getTrackType() const override {
return TrackAudio;
}
CodecId getCodecId() const override {
return _codecId;
}
private:
_StrPrinter _printer;
CodecId _codecId;
};
}//namespace mediakit
#endif //ZLMEDIAKIT_AAC_H
#endif //ZLMEDIAKIT_G711_H

View File

@ -20,15 +20,15 @@ G711RtmpDecoder::G711RtmpDecoder(CodecId codecId) {
G711Frame::Ptr G711RtmpDecoder::obtainFrame() {
//从缓存池重新申请对象,防止覆盖已经写入环形缓存的对象
auto frame = ResourcePoolHelper<G711Frame>::obtainObj();
frame->buffer.clear();
frame->_codecId = _codecId;
frame->_buffer.clear();
frame->_codecid = _codecId;
return frame;
}
bool G711RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool) {
//拷贝G711负载
_frame->buffer.assign(pkt->strBuf.data() + 1, pkt->strBuf.size() - 1);
_frame->timeStamp = pkt->timeStamp;
_frame->_buffer.assign(pkt->strBuf.data() + 1, pkt->strBuf.size() - 1);
_frame->_dts = pkt->timeStamp;
//写入环形缓存
RtmpCodec::inputFrame(_frame);
_frame = obtainFrame();

View File

@ -33,10 +33,6 @@ public:
*/
bool inputRtmp(const RtmpPacket::Ptr &Rtmp, bool key_pos = false) override;
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{
return _codecId;
}

View File

@ -20,9 +20,9 @@ G711RtpDecoder::G711RtpDecoder(const Track::Ptr &track){
G711Frame::Ptr G711RtpDecoder::obtainFrame() {
//从缓存池重新申请对象,防止覆盖已经写入环形缓存的对象
auto frame = ResourcePoolHelper<G711Frame>::obtainObj();
frame->buffer.clear();
frame->_codecId = _codecid;
frame->timeStamp = 0;
frame->_buffer.clear();
frame->_codecid = _codecid;
frame->_dts = 0;
return frame;
}
@ -32,17 +32,17 @@ bool G711RtpDecoder::inputRtp(const RtpPacket::Ptr &rtppack, bool) {
// 获取rtp数据
const char *rtp_packet_buf = rtppack->data() + rtppack->offset;
if (rtppack->timeStamp != _frame->timeStamp) {
if (rtppack->timeStamp != _frame->_dts) {
//时间戳变更,清空上一帧
onGetG711(_frame);
}
//追加数据
_frame->buffer.append(rtp_packet_buf, length);
_frame->_buffer.append(rtp_packet_buf, length);
//赋值时间戳
_frame->timeStamp = rtppack->timeStamp;
_frame->_dts = rtppack->timeStamp;
if (rtppack->mark || _frame->buffer.size() > 10 * 1024) {
if (rtppack->mark || _frame->_buffer.size() > 10 * 1024) {
//标记为mark时或者内存快溢出时我们认为这是该帧最后一个包
onGetG711(_frame);
}
@ -50,7 +50,7 @@ bool G711RtpDecoder::inputRtp(const RtpPacket::Ptr &rtppack, bool) {
}
void G711RtpDecoder::onGetG711(const G711Frame::Ptr &frame) {
if(!frame->buffer.empty()){
if(!frame->_buffer.empty()){
//写入环形缓存
RtpCodec::inputFrame(frame);
_frame = obtainFrame();
@ -62,12 +62,12 @@ void G711RtpDecoder::onGetG711(const G711Frame::Ptr &frame) {
G711RtpEncoder::G711RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize,
uint32_t ui32SampleRate,
uint8_t ui8PlayloadType,
uint8_t ui8PayloadType,
uint8_t ui8Interleaved) :
RtpInfo(ui32Ssrc,
ui32MtuSize,
ui32SampleRate,
ui8PlayloadType,
ui8PayloadType,
ui8Interleaved) {
}
@ -96,6 +96,3 @@ void G711RtpEncoder::makeG711Rtp(const void *data, unsigned int len, bool mark,
}
}//namespace mediakit

View File

@ -31,10 +31,6 @@ public:
*/
bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = false) override;
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{
return _codecid;
}
@ -62,13 +58,13 @@ public:
* @param ui32Ssrc ssrc
* @param ui32MtuSize mtu
* @param ui32SampleRate
* @param ui8PlayloadType pt类型
* @param ui8PayloadType pt类型
* @param ui8Interleaved rtsp interleaved
*/
G711RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize,
uint32_t ui32SampleRate,
uint8_t ui8PlayloadType = 0,
uint8_t ui8PayloadType = 0,
uint8_t ui8Interleaved = TrackAudio * 2);
~G711RtpEncoder() {}

View File

@ -44,34 +44,77 @@ const char *memfind(const char *buf, int len, const char *subbuf, int sublen) {
return NULL;
}
void splitH264(const char *ptr, int len, const std::function<void(const char *, int)> &cb) {
auto nal = ptr;
void splitH264(const char *ptr, int len, int prefix, const std::function<void(const char *, int, int)> &cb) {
auto start = ptr + prefix;
auto end = ptr + len;
while(true) {
auto next_nal = memfind(nal + 3,end - nal - 3,"\x0\x0\x1",3);
if(next_nal){
if(*(next_nal - 1) == 0x00){
next_nal -= 1;
int next_prefix;
while (true) {
auto next_start = memfind(start, end - start, "\x00\x00\x01", 3);
if (next_start) {
//找到下一帧
if (*(next_start - 1) == 0x00) {
//这个是00 00 00 01开头
next_start -= 1;
next_prefix = 4;
} else {
//这个是00 00 01开头
next_prefix = 3;
}
cb(nal,next_nal - nal);
nal = next_nal;
//记得加上本帧prefix长度
cb(start - prefix, next_start - start + prefix, prefix);
//搜索下一帧末尾的起始位置
start = next_start + next_prefix;
//记录下一帧的prefix长度
prefix = next_prefix;
continue;
}
cb(nal,end - nal);
//未找到下一帧,这是最后一帧
cb(start - prefix, end - start + prefix, prefix);
break;
}
}
int prefixSize(const char *ptr, int len){
if (len < 4) {
return 0;
}
if (ptr[0] != 0x00 || ptr[1] != 0x00) {
//不是0x00 00开头
return 0;
}
if (ptr[2] == 0x00 && ptr[3] == 0x01) {
//是0x00 00 00 01
return 4;
}
if (ptr[2] == 0x01) {
//是0x00 00 01
return 3;
}
return 0;
}
#if 0
//splitH264函数测试程序
static onceToken s_token([](){
char buf[] = "\x00\x00\x00\x01\x12\x23\x34\x45\x56"
"\x00\x00\x00\x01\x12\x23\x34\x45\x56"
"\x00\x00\x00\x01\x12\x23\x34\x45\x56"
"\x00\x00\x01\x12\x23\x34\x45\x56";
splitH264(buf, sizeof(buf) - 1, [](const char *ptr, int len){
cout << hexdump(ptr, len) << endl;
});
{
char buf[] = "\x00\x00\x00\x01\x12\x23\x34\x45\x56"
"\x00\x00\x00\x01\x23\x34\x45\x56"
"\x00\x00\x00\x01x34\x45\x56"
"\x00\x00\x01\x12\x23\x34\x45\x56";
splitH264(buf, sizeof(buf) - 1, 4, [](const char *ptr, int len, int prefix) {
cout << prefix << " " << hexdump(ptr, len) << endl;
});
}
{
char buf[] = "\x00\x00\x00\x01\x12\x23\x34\x45\x56";
splitH264(buf, sizeof(buf) - 1, 4, [](const char *ptr, int len, int prefix) {
cout << prefix << " " << hexdump(ptr, len) << endl;
});
}
});
#endif //0

View File

@ -20,12 +20,12 @@ using namespace toolkit;
namespace mediakit{
bool getAVCInfo(const string &strSps,int &iVideoWidth, int &iVideoHeight, float &iVideoFps);
void splitH264(const char *ptr, int len, const std::function<void(const char *, int)> &cb);
void splitH264(const char *ptr, int len, int prefix, const std::function<void(const char *, int, int)> &cb);
int prefixSize(const char *ptr, int len);
/**
* 264
*/
class H264Frame : public Frame {
class H264Frame : public FrameImp {
public:
typedef std::shared_ptr<H264Frame> Ptr;
@ -36,30 +36,8 @@ public:
NAL_SEI = 6,
} NalType;
char *data() const override{
return (char *)_buffer.data();
}
uint32_t size() const override {
return _buffer.size();
}
uint32_t dts() const override {
return _dts;
}
uint32_t pts() const override {
return _pts ? _pts : _dts;
}
uint32_t prefixSize() const override{
return _prefix_size;
}
TrackType getTrackType() const override{
return TrackVideo;
}
CodecId getCodecId() const override{
return CodecH264;
H264Frame(){
_codecid = CodecH264;
}
bool keyFrame() const override {
@ -69,39 +47,27 @@ public:
bool configFrame() const override{
switch(H264_TYPE(_buffer[_prefix_size]) ){
case H264Frame::NAL_SPS:
case H264Frame::NAL_PPS:
return true;
default:
return false;
case H264Frame::NAL_PPS:return true;
default:return false;
}
}
public:
uint32_t _dts = 0;
uint32_t _pts = 0;
uint32_t _prefix_size = 4;
string _buffer;
};
/**
* H264类
* Frame类
* DevChannel中有使用
*/
class H264FrameNoCacheAble : public FrameNoCacheAble {
class H264FrameNoCacheAble : public FrameFromPtr {
public:
typedef std::shared_ptr<H264FrameNoCacheAble> Ptr;
H264FrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts , uint32_t pts ,int prefixeSize = 4){
H264FrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts , uint32_t pts ,int prefix_size = 4){
_ptr = ptr;
_size = size;
_dts = dts;
_pts = pts;
_prefixSize = prefixeSize;
}
TrackType getTrackType() const override{
return TrackVideo;
_prefix_size = prefix_size;
}
CodecId getCodecId() const override{
@ -109,43 +75,18 @@ public:
}
bool keyFrame() const override {
return H264_TYPE(_ptr[_prefixSize]) == H264Frame::NAL_IDR;
return H264_TYPE(_ptr[_prefix_size]) == H264Frame::NAL_IDR;
}
bool configFrame() const override{
switch(H264_TYPE(_ptr[_prefixSize])){
switch(H264_TYPE(_ptr[_prefix_size])){
case H264Frame::NAL_SPS:
case H264Frame::NAL_PPS:
return true;
default:
return false;
case H264Frame::NAL_PPS:return true;
default:return false;
}
}
};
/**
* H264Frame类中可以有多个帧 0x 00 00 01
* ZLMediaKit会先把这种复合帧split成单个帧然后再处理
* H264FrameSubFrame
*
*/
template<typename Parent>
class FrameInternal : public Parent{
public:
typedef std::shared_ptr<FrameInternal> Ptr;
FrameInternal(const Frame::Ptr &parent_frame,
char *ptr,
uint32_t size,
int prefixeSize) : Parent(ptr,size,parent_frame->dts(),parent_frame->pts(),prefixeSize){
_parent_frame = parent_frame;
}
bool cacheAble() const override {
return _parent_frame->cacheAble();
}
private:
Frame::Ptr _parent_frame;
};
typedef FrameInternal<H264FrameNoCacheAble> H264FrameInternal;
/**
@ -243,25 +184,10 @@ public:
int type = H264_TYPE(*((uint8_t *)frame->data() + frame->prefixSize()));
if(type == H264Frame::NAL_SPS || type == H264Frame::NAL_SEI){
//有些设备会把SPS PPS IDR帧当做一个帧打包所以我们要split一下
bool first_frame = true;
splitH264(frame->data() + frame->prefixSize(),
frame->size() - frame->prefixSize(),
[&](const char *ptr, int len){
if(first_frame){
H264FrameInternal::Ptr sub_frame = std::make_shared<H264FrameInternal>(frame,
frame->data(),
len + frame->prefixSize(),
frame->prefixSize());
inputFrame_l(sub_frame);
first_frame = false;
}else{
H264FrameInternal::Ptr sub_frame = std::make_shared<H264FrameInternal>(frame,
(char *)ptr,
len ,
3);
inputFrame_l(sub_frame);
}
});
splitH264(frame->data(), frame->size(), frame->prefixSize(), [&](const char *ptr, int len, int prefix) {
H264FrameInternal::Ptr sub_frame = std::make_shared<H264FrameInternal>(frame, (char *)ptr, len, prefix);
inputFrame_l(sub_frame);
});
} else{
inputFrame_l(frame);
}
@ -302,6 +228,11 @@ private:
}
break;
case H264Frame::NAL_SEI:{
//忽略SEI
break;
}
default:
VideoTrack::inputFrame(frame);
break;
@ -349,29 +280,27 @@ private:
bool _last_frame_is_idr = false;
};
/**
* h264类型sdp
*/
class H264Sdp : public Sdp {
public:
/**
*
* @param sps 264 sps,0x00000001
* @param pps 264 pps,0x00000001
* @param playload_type rtp playload type 96
* @param payload_type rtp payload type 96
* @param bitrate
*/
H264Sdp(const string &strSPS,
const string &strPPS,
int playload_type = 96,
int bitrate = 4000) : Sdp(90000,playload_type) {
int payload_type = 96,
int bitrate = 4000) : Sdp(90000,payload_type) {
//视频通道
_printer << "m=video 0 RTP/AVP " << playload_type << "\r\n";
_printer << "m=video 0 RTP/AVP " << payload_type << "\r\n";
_printer << "b=AS:" << bitrate << "\r\n";
_printer << "a=rtpmap:" << playload_type << " H264/" << 90000 << "\r\n";
_printer << "a=fmtp:" << playload_type << " packetization-mode=1; profile-level-id=";
_printer << "a=rtpmap:" << payload_type << " H264/" << 90000 << "\r\n";
_printer << "a=fmtp:" << payload_type << " packetization-mode=1; profile-level-id=";
char strTemp[100];
uint32_t profile_level_id = 0;
@ -390,17 +319,13 @@ public:
memset(strTemp, 0, 100);
av_base64_encode(strTemp, 100, (uint8_t *) strPPS.data(), strPPS.size());
_printer << strTemp << "\r\n";
_printer << "a=control:trackID=" << getTrackType() << "\r\n";
_printer << "a=control:trackID=" << (int)TrackVideo << "\r\n";
}
string getSdp() const override {
return _printer;
}
TrackType getTrackType() const override {
return TrackVideo;
}
CodecId getCodecId() const override {
return CodecH264;
}
@ -408,8 +333,5 @@ private:
_StrPrinter _printer;
};
}//namespace mediakit
#endif //ZLMEDIAKIT_H264_H
#endif //ZLMEDIAKIT_H264_H

View File

@ -36,10 +36,6 @@ public:
*/
bool inputRtmp(const RtmpPacket::Ptr &rtmp, bool key_pos = true) override;
TrackType getTrackType() const override{
return TrackVideo;
}
CodecId getCodecId() const override{
return CodecH264;
}

View File

@ -157,7 +157,7 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
if (rtppack->sequence != _lastSeq + 1 && rtppack->sequence != 0) {
//中间的或末尾的rtp包其seq必须连续(如果回环了则判定为连续)否则说明rtp丢包那么该帧不完整必须得丢弃
_h264frame->_buffer.clear();
WarnL << "rtp sequence不连续: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃";
WarnL << "rtp丢包: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃";
return false;
}
@ -204,12 +204,12 @@ void H264RtpDecoder::onGetH264(const H264Frame::Ptr &frame) {
H264RtpEncoder::H264RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize,
uint32_t ui32SampleRate,
uint8_t ui8PlayloadType,
uint8_t ui8PayloadType,
uint8_t ui8Interleaved) :
RtpInfo(ui32Ssrc,
ui32MtuSize,
ui32SampleRate,
ui8PlayloadType,
ui8PayloadType,
ui8Interleaved) {
}

View File

@ -38,10 +38,6 @@ public:
*/
bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = true) override;
TrackType getTrackType() const override{
return TrackVideo;
}
CodecId getCodecId() const override{
return CodecH264;
}
@ -66,13 +62,13 @@ public:
* @param ui32Ssrc ssrc
* @param ui32MtuSize mtu大小
* @param ui32SampleRate 90000
* @param ui8PlayloadType pt类型
* @param ui8PayloadType pt类型
* @param ui8Interleaved rtsp interleaved
*/
H264RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize = 1400,
uint32_t ui32SampleRate = 90000,
uint8_t ui8PlayloadType = 96,
uint8_t ui8PayloadType = 96,
uint8_t ui8Interleaved = TrackVideo * 2);
~H264RtpEncoder() {}

View File

@ -23,9 +23,9 @@ namespace mediakit {
bool getHEVCInfo(const string &strVps, const string &strSps, int &iVideoWidth, int &iVideoHeight, float &iVideoFps);
/**
* 265
*/
class H265Frame : public Frame {
* 265
*/
class H265Frame : public FrameImp {
public:
typedef std::shared_ptr<H265Frame> Ptr;
@ -60,32 +60,8 @@ public:
NAL_SEI_SUFFIX = 40,
} NaleType;
char *data() const override {
return (char *) _buffer.data();
}
uint32_t size() const override {
return _buffer.size();
}
uint32_t dts() const override {
return _dts;
}
uint32_t pts() const override {
return _pts ? _pts : _dts;
}
uint32_t prefixSize() const override {
return _prefix_size;
}
TrackType getTrackType() const override {
return TrackVideo;
}
CodecId getCodecId() const override {
return CodecH265;
H265Frame(){
_codecid = CodecH265;
}
bool keyFrame() const override {
@ -96,39 +72,26 @@ public:
switch(H265_TYPE(_buffer[_prefix_size])){
case H265Frame::NAL_VPS:
case H265Frame::NAL_SPS:
case H265Frame::NAL_PPS:
return true;
default:
return false;
case H265Frame::NAL_PPS : return true;
default : return false;
}
}
static bool isKeyFrame(int type) {
return type >= NAL_BLA_W_LP && type <= NAL_RSV_IRAP_VCL23;
}
public:
uint32_t _dts = 0;
uint32_t _pts = 0;
uint32_t _prefix_size = 4;
string _buffer;
};
class H265FrameNoCacheAble : public FrameNoCacheAble {
class H265FrameNoCacheAble : public FrameFromPtr {
public:
typedef std::shared_ptr<H265FrameNoCacheAble> Ptr;
H265FrameNoCacheAble(char *ptr, uint32_t size, uint32_t dts,uint32_t pts, int prefixeSize = 4) {
H265FrameNoCacheAble(char *ptr, uint32_t size, uint32_t dts,uint32_t pts, int prefix_size = 4) {
_ptr = ptr;
_size = size;
_dts = dts;
_pts = pts;
_prefixSize = prefixeSize;
}
TrackType getTrackType() const override {
return TrackVideo;
_prefix_size = prefix_size;
}
CodecId getCodecId() const override {
@ -136,17 +99,15 @@ public:
}
bool keyFrame() const override {
return H265Frame::isKeyFrame(H265_TYPE(((uint8_t *) _ptr)[_prefixSize]));
return H265Frame::isKeyFrame(H265_TYPE(((uint8_t *) _ptr)[_prefix_size]));
}
bool configFrame() const override{
switch(H265_TYPE(((uint8_t *) _ptr)[_prefixSize])){
switch(H265_TYPE(((uint8_t *) _ptr)[_prefix_size])){
case H265Frame::NAL_VPS:
case H265Frame::NAL_SPS:
case H265Frame::NAL_PPS:
return true;
default:
return false;
case H265Frame::NAL_PPS:return true;
default:return false;
}
}
};
@ -184,7 +145,6 @@ public:
/**
* 0x00 00 00 01vps
* @return
*/
const string &getVps() const {
return _vps;
@ -192,7 +152,6 @@ public:
/**
* 0x00 00 00 01sps
* @return
*/
const string &getSps() const {
return _sps;
@ -200,7 +159,6 @@ public:
/**
* 0x00 00 00 01pps
* @return
*/
const string &getPps() const {
return _pps;
@ -212,7 +170,6 @@ public:
/**
*
* @return
*/
int getVideoHeight() const override{
return _height ;
@ -220,7 +177,6 @@ public:
/**
*
* @return
*/
int getVideoWidth() const override{
return _width;
@ -228,7 +184,6 @@ public:
/**
* fps
* @return
*/
float getVideoFps() const override{
return _fps;
@ -238,36 +193,20 @@ public:
return !_vps.empty() && !_sps.empty() && !_pps.empty();
}
/**
* ,sps pps
* @param frame
*/
* ,sps pps
* @param frame
*/
void inputFrame(const Frame::Ptr &frame) override{
int type = H265_TYPE(*((uint8_t *)frame->data() + frame->prefixSize()));
if(frame->configFrame()){
bool first_frame = true;
splitH264(frame->data() + frame->prefixSize(),
frame->size() - frame->prefixSize(),
[&](const char *ptr, int len){
if(first_frame){
H265FrameInternal::Ptr sub_frame = std::make_shared<H265FrameInternal>(frame,
frame->data(),
len + frame->prefixSize(),
frame->prefixSize());
inputFrame_l(sub_frame);
first_frame = false;
}else{
H265FrameInternal::Ptr sub_frame = std::make_shared<H265FrameInternal>(frame,
(char *)ptr,
len ,
3);
inputFrame_l(sub_frame);
}
});
}else{
inputFrame_l(frame);
}
if(frame->configFrame() || type == H265Frame::NAL_SEI_PREFIX){
splitH264(frame->data(), frame->size(), frame->prefixSize(), [&](const char *ptr, int len, int prefix){
H265FrameInternal::Ptr sub_frame = std::make_shared<H265FrameInternal>(frame, (char*)ptr, len, prefix);
inputFrame_l(sub_frame);
});
} else {
inputFrame_l(frame);
}
}
private:
@ -367,47 +306,41 @@ private:
bool _last_frame_is_idr = false;
};
/**
* h265类型sdp
*/
class H265Sdp : public Sdp {
public:
/**
*
*
* @param sps 265 sps,0x00000001
* @param pps 265 pps,0x00000001
* @param playload_type rtp playload type 96
* @param payload_type rtp payload type 96
* @param bitrate
*/
H265Sdp(const string &strVPS,
const string &strSPS,
const string &strPPS,
int playload_type = 96,
int bitrate = 4000) : Sdp(90000,playload_type) {
int payload_type = 96,
int bitrate = 4000) : Sdp(90000,payload_type) {
//视频通道
_printer << "m=video 0 RTP/AVP " << playload_type << "\r\n";
_printer << "m=video 0 RTP/AVP " << payload_type << "\r\n";
_printer << "b=AS:" << bitrate << "\r\n";
_printer << "a=rtpmap:" << playload_type << " H265/" << 90000 << "\r\n";
_printer << "a=fmtp:" << playload_type << " ";
_printer << "a=rtpmap:" << payload_type << " H265/" << 90000 << "\r\n";
_printer << "a=fmtp:" << payload_type << " ";
_printer << "sprop-vps=";
_printer << encodeBase64(strVPS) << "; ";
_printer << "sprop-sps=";
_printer << encodeBase64(strSPS) << "; ";
_printer << "sprop-pps=";
_printer << encodeBase64(strPPS) << "\r\n";
_printer << "a=control:trackID=" << getTrackType() << "\r\n";
_printer << "a=control:trackID=" << (int)TrackVideo << "\r\n";
}
string getSdp() const override {
return _printer;
}
TrackType getTrackType() const override {
return TrackVideo;
}
CodecId getCodecId() const override {
return CodecH265;
}
@ -415,9 +348,5 @@ private:
_StrPrinter _printer;
};
}//namespace mediakit
#endif //ZLMEDIAKIT_H265_H
#endif //ZLMEDIAKIT_H265_H

View File

@ -36,10 +36,6 @@ public:
*/
bool inputRtmp(const RtmpPacket::Ptr &rtmp, bool key_pos = true) override;
TrackType getTrackType() const override{
return TrackVideo;
}
CodecId getCodecId() const override{
return CodecH265;
}

View File

@ -96,7 +96,7 @@ bool H265RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
if (rtppack->sequence != _lastSeq + 1 && rtppack->sequence != 0) {
//中间的或末尾的rtp包其seq必须连续(如果回环了则判定为连续)否则说明rtp丢包那么该帧不完整必须得丢弃
_h265frame->_buffer.clear();
WarnL << "rtp sequence不连续: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃";
WarnL << "rtp丢包: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃";
return false;
}
@ -140,12 +140,12 @@ void H265RtpDecoder::onGetH265(const H265Frame::Ptr &frame) {
H265RtpEncoder::H265RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize,
uint32_t ui32SampleRate,
uint8_t ui8PlayloadType,
uint8_t ui8PayloadType,
uint8_t ui8Interleaved) :
RtpInfo(ui32Ssrc,
ui32MtuSize,
ui32SampleRate,
ui8PlayloadType,
ui8PayloadType,
ui8Interleaved) {
}

View File

@ -39,10 +39,6 @@ public:
*/
bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = true) override;
TrackType getTrackType() const override{
return TrackVideo;
}
CodecId getCodecId() const override{
return CodecH265;
}
@ -67,13 +63,13 @@ public:
* @param ui32Ssrc ssrc
* @param ui32MtuSize mtu大小
* @param ui32SampleRate 90000
* @param ui8PlayloadType pt类型
* @param ui8PayloadType pt类型
* @param ui8Interleaved rtsp interleaved
*/
H265RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize = 1400,
uint32_t ui32SampleRate = 90000,
uint8_t ui8PlayloadType = 96,
uint8_t ui8PayloadType = 96,
uint8_t ui8Interleaved = TrackVideo * 2);
~H265RtpEncoder() {}

23
src/Extension/Opus.cpp Normal file
View File

@ -0,0 +1,23 @@
/*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include "Opus.h"
namespace mediakit{
/**
 * Build the SDP description for this Opus track.
 * @return the sdp object, or nullptr (with a warning) when the track is not ready yet
 */
Sdp::Ptr OpusTrack::getSdp() {
    if (ready()) {
        return std::make_shared<OpusSdp>(getAudioSampleRate(), getAudioChannel());
    }
    WarnL << getCodecName() << " Track未准备好";
    return nullptr;
}
}//namespace mediakit

107
src/Extension/Opus.h Normal file
View File

@ -0,0 +1,107 @@
/*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifndef ZLMEDIAKIT_OPUS_H
#define ZLMEDIAKIT_OPUS_H
#include "Frame.h"
#include "Track.h"
namespace mediakit{
/**
* Opus帧
*/
//An Opus audio frame whose payload is owned by the frame
//(presumably stored in FrameImp's internal buffer — see Frame.h).
class OpusFrame : public FrameImp {
public:
    typedef std::shared_ptr<OpusFrame> Ptr;
    //Tag the inherited codec id as Opus; all other fields keep FrameImp defaults.
    OpusFrame(){
        _codecid = CodecOpus;
    }
};
/**
* Opus帧
*/
class OpusFrameNoCacheAble : public FrameFromPtr {
public:
typedef std::shared_ptr<OpusFrameNoCacheAble> Ptr;
OpusFrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts, uint32_t pts = 0,int prefix_size = 0){
_ptr = ptr;
_size = size;
_dts = dts;
_prefix_size = prefix_size;
}
CodecId getCodecId() const override{
return CodecOpus;
}
bool keyFrame() const override {
return false;
}
bool configFrame() const override{
return false;
}
};
/**
* Opus帧音频通道
*/
//Opus audio track; sample rate / channel / sample-bit handling lives in AudioTrackImp.
class OpusTrack : public AudioTrackImp{
public:
    typedef std::shared_ptr<OpusTrack> Ptr;
    OpusTrack(int sample_rate, int channels, int sample_bit) : AudioTrackImp(CodecOpus,sample_rate,channels,sample_bit){}

private:
    //Clone this track (deep copy via the implicit copy constructor).
    Track::Ptr clone() override {
        return std::make_shared<std::remove_reference<decltype(*this)>::type >(*this);
    }
    //Build the SDP description; defined in Opus.cpp, returns nullptr when not ready.
    Sdp::Ptr getSdp() override ;
};
/**
* Opus类型SDP
*/
/**
 * SDP media section generator for Opus audio.
 */
class OpusSdp : public Sdp {
public:
    /**
     * Build the m=audio section for an Opus stream.
     * @param sample_rate audio sample rate (Hz)
     * @param channels audio channel count
     * @param payload_type rtp payload type (dynamic range, defaults to 98)
     * @param bitrate bitrate in kb/s
     *        NOTE(review): bitrate is currently unused — no "b=AS:" line is
     *        emitted here (unlike H265Sdp); confirm whether that is intended.
     */
    OpusSdp(int sample_rate,
            int channels,
            int payload_type = 98,
            int bitrate = 128) : Sdp(sample_rate,payload_type){
        _printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n";
        _printer << "a=rtpmap:" << payload_type << " opus/" << sample_rate << "/" << channels << "\r\n";
        _printer << "a=control:trackID=" << (int)TrackAudio << "\r\n";
    }

    //Return the accumulated sdp text.
    string getSdp() const override {
        return _printer;
    }

    CodecId getCodecId() const override {
        return CodecOpus;
    }

private:
    _StrPrinter _printer;
};
}//namespace mediakit
#endif //ZLMEDIAKIT_OPUS_H

View File

@ -65,8 +65,6 @@ class VideoTrack : public Track {
public:
typedef std::shared_ptr<VideoTrack> Ptr;
TrackType getTrackType() const override { return TrackVideo;};
/**
*
* @return
@ -93,8 +91,6 @@ class AudioTrack : public Track {
public:
typedef std::shared_ptr<AudioTrack> Ptr;
TrackType getTrackType() const override { return TrackAudio;};
/**
*
* @return
@ -114,6 +110,64 @@ public:
virtual int getAudioChannel() const {return 0;};
};
//Generic audio track whose parameters are all supplied by the caller.
class AudioTrackImp : public AudioTrack{
public:
    typedef std::shared_ptr<AudioTrackImp> Ptr;

    /**
     * Construct an audio track with fixed parameters.
     * @param codecId audio codec id
     * @param sample_rate sample rate (Hz)
     * @param channels channel count
     * @param sample_bit bits per sample, typically 16
     */
    AudioTrackImp(CodecId codecId,int sample_rate, int channels, int sample_bit){
        _codecid = codecId;
        _sample_rate = sample_rate;
        _channels = channels;
        _sample_bit = sample_bit;
    }

    /**
     * Codec id supplied at construction.
     */
    CodecId getCodecId() const override{
        return _codecid;
    }

    /**
     * Always ready: every parameter was provided up front.
     */
    bool ready() override {
        return true;
    }

    /**
     * Sample rate (Hz).
     */
    int getAudioSampleRate() const override{
        return _sample_rate;
    }

    /**
     * Bits per sample (e.g. 16 or 8).
     */
    int getAudioSampleBit() const override{
        return _sample_bit;
    }

    /**
     * Channel count.
     */
    int getAudioChannel() const override{
        return _channels;
    }

private:
    CodecId _codecid;
    int _sample_rate;
    int _channels;
    int _sample_bit;
};
class TrackSource{
public:
@ -123,7 +177,6 @@ public:
/**
* Track
* @param trackReady Track
* @return
*/
virtual vector<Track::Ptr> getTracks(bool trackReady = true) const = 0;
@ -131,7 +184,6 @@ public:
* Track
* @param type track类型
* @param trackReady Track
* @return
*/
Track::Ptr getTrack(TrackType type , bool trackReady = true) const {
auto tracks = getTracks(trackReady);
@ -145,5 +197,4 @@ public:
};
}//namespace mediakit
#endif //ZLMEDIAKIT_TRACK_H
#endif //ZLMEDIAKIT_TRACK_H

139
src/Http/HlsParser.cpp Normal file
View File

@ -0,0 +1,139 @@
/*
* Copyright (c) 2020 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include <cstdlib>
#include "HlsParser.h"
#include "Util/util.h"
#include "Common/Parser.h"
using namespace toolkit;
namespace mediakit {
bool HlsParser::parse(const string &http_url, const string &m3u8) {
float extinf_dur = 0;
ts_segment segment;
map<int, ts_segment> ts_map;
_total_dur = 0;
_is_live = true;
_is_m3u8_inner = false;
int index = 0;
auto lines = split(m3u8, "\n");
for (auto &line : lines) {
trim(line);
if (line.size() < 2) {
continue;
}
if ((_is_m3u8_inner || extinf_dur != 0) && line[0] != '#') {
segment.duration = extinf_dur;
if (line.find("http://") == 0 || line.find("https://") == 0) {
segment.url = line;
} else {
if (line.find("/") == 0) {
segment.url = http_url.substr(0, http_url.find("/", 8)) + line;
} else {
segment.url = http_url.substr(0, http_url.rfind("/") + 1) + line;
}
}
if (!_is_m3u8_inner) {
//ts按照先后顺序排序
ts_map.emplace(index++, segment);
} else {
//子m3u8按照带宽排序
ts_map.emplace(segment.bandwidth, segment);
}
extinf_dur = 0;
continue;
}
_is_m3u8_inner = false;
if (line.find("#EXTINF:") == 0) {
sscanf(line.data(), "#EXTINF:%f,", &extinf_dur);
_total_dur += extinf_dur;
continue;
}
static const string s_stream_inf = "#EXT-X-STREAM-INF:";
if (line.find(s_stream_inf) == 0) {
_is_m3u8_inner = true;
auto key_val = Parser::parseArgs(line.substr(s_stream_inf.size()), ",", "=");
segment.program_id = atoi(key_val["PROGRAM-ID"].data());
segment.bandwidth = atoi(key_val["BANDWIDTH"].data());
sscanf(key_val["RESOLUTION"].data(), "%dx%d", &segment.width, &segment.height);
continue;
}
if (line == "#EXTM3U") {
_is_m3u8 = true;
continue;
}
if (line.find("#EXT-X-ALLOW-CACHE:") == 0) {
_allow_cache = (line.find(":YES") != string::npos);
continue;
}
if (line.find("#EXT-X-VERSION:") == 0) {
sscanf(line.data(), "#EXT-X-VERSION:%d", &_version);
continue;
}
if (line.find("#EXT-X-TARGETDURATION:") == 0) {
sscanf(line.data(), "#EXT-X-TARGETDURATION:%d", &_target_dur);
continue;
}
if (line.find("#EXT-X-MEDIA-SEQUENCE:") == 0) {
sscanf(line.data(), "#EXT-X-MEDIA-SEQUENCE:%lld", &_sequence);
continue;
}
if (line.find("#EXT-X-ENDLIST") == 0) {
//点播
_is_live = false;
continue;
}
continue;
}
if (_is_m3u8) {
onParsed(_is_m3u8_inner, _sequence, ts_map);
}
return _is_m3u8;
}
//True when the playlist began with #EXTM3U, i.e. it is a valid m3u8 file.
bool HlsParser::isM3u8() const {
    return _is_m3u8;
}

//True when no #EXT-X-ENDLIST tag was seen (live stream rather than VOD).
bool HlsParser::isLive() const{
    return _is_live;
}

//Value of #EXT-X-ALLOW-CACHE (true when ":YES").
bool HlsParser::allowCache() const {
    return _allow_cache;
}

//Value of #EXT-X-VERSION, 0 when absent.
int HlsParser::getVersion() const {
    return _version;
}

//Value of #EXT-X-TARGETDURATION in seconds, 0 when absent.
int HlsParser::getTargetDur() const {
    return _target_dur;
}

//Value of #EXT-X-MEDIA-SEQUENCE.
//NOTE(review): _sequence is int64_t but the return type is int — values above
//INT_MAX are silently truncated; consider widening the interface to int64_t.
int HlsParser::getSequence() const {
    return _sequence;
}

//True when the playlist is a master playlist referencing sub-m3u8 variants.
bool HlsParser::isM3u8Inner() const {
    return _is_m3u8_inner;
}
}//namespace mediakit

95
src/Http/HlsParser.h Normal file
View File

@ -0,0 +1,95 @@
/*
* Copyright (c) 2020 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifndef HTTP_HLSPARSER_H
#define HTTP_HLSPARSER_H
#include <string>
#include <list>
#include <map>
using namespace std;
namespace mediakit {
//One entry of an hls playlist: either a ts segment or a sub-m3u8 variant.
//All numeric members carry default initializers: HlsParser::parse() only fills
//program_id/bandwidth/width/height for #EXT-X-STREAM-INF entries (and the
//RESOLUTION sscanf may fail), so without defaults they would be read uninitialized.
typedef struct{
    //segment / playlist url
    string url;
    //ts segment duration in seconds (from #EXTINF)
    float duration = 0;
    //////fields below are only meaningful for sub-m3u8 entries//////
    //program id (PROGRAM-ID attribute)
    int program_id = 0;
    //bandwidth in bits per second (BANDWIDTH attribute)
    int bandwidth = 0;
    //video width in pixels (RESOLUTION attribute), 0 when unknown
    int width = 0;
    //video height in pixels (RESOLUTION attribute), 0 when unknown
    int height = 0;
} ts_segment;
//Parser for hls (m3u8) playlists — both media playlists and master playlists.
class HlsParser {
public:
    HlsParser(){}
    ~HlsParser(){}

    /**
     * Parse an m3u8 document.
     * @param http_url url the document was fetched from (used to absolutize segment urls)
     * @param m3u8 document body
     * @return true when the document is a valid m3u8 (contains #EXTM3U)
     */
    bool parse(const string &http_url,const string &m3u8);

    /**
     * True when the #EXTM3U tag is present, i.e. the document is an m3u8 file.
     */
    bool isM3u8() const;

    /**
     * Value of #EXT-X-ALLOW-CACHE: whether caching is permitted.
     */
    bool allowCache() const;

    /**
     * True when #EXT-X-ENDLIST is absent, i.e. the stream is live (not VOD).
     */
    bool isLive() const ;

    /**
     * Value of #EXT-X-VERSION.
     */
    int getVersion() const;

    /**
     * Value of #EXT-X-TARGETDURATION, in seconds.
     */
    int getTargetDur() const;

    /**
     * Value of #EXT-X-MEDIA-SEQUENCE: sequence number of the playlist.
     */
    int getSequence() const;

    /**
     * True when the playlist is a master playlist referencing sub-m3u8 urls.
     */
    bool isM3u8Inner() const;

protected:
    //invoked after a successful parse with the extracted segment/variant list
    virtual void onParsed(bool is_m3u8_inner,int64_t sequence,const map<int,ts_segment> &ts_list) {};

private:
    bool _is_m3u8 = false;
    bool _allow_cache = false;
    bool _is_live = true;
    int _version = 0;
    int _target_dur = 0;
    float _total_dur = 0;
    int64_t _sequence = 0;
    //whether the playlist contains sub-m3u8 entries
    bool _is_m3u8_inner = false;
};
}//namespace mediakit
#endif //HTTP_HLSPARSER_H

317
src/Http/HlsPlayer.cpp Normal file
View File

@ -0,0 +1,317 @@
/*
* Copyright (c) 2020 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include "HlsPlayer.h"
namespace mediakit {
//Forward every complete ts packet produced by the splitter to onPacket().
HlsPlayer::HlsPlayer(const EventPoller::Ptr &poller){
    _segment.setOnSegment([this](const char *data, uint64_t len) { onPacket(data, len); });
    _poller = poller ? poller : EventPollerPool::Instance().getPoller();
}

HlsPlayer::~HlsPlayer() {}

//Push the url on the fallback stack (redirects are appended later) and start playing.
void HlsPlayer::play(const string &strUrl) {
    _m3u8_list.emplace_back(strUrl);
    play_l();
}
//Fetch the most recently added m3u8 url; give up when every url has failed.
void HlsPlayer::play_l(){
    if (_m3u8_list.empty()) {
        teardown_l(SockException(Err_shutdown, "所有hls url都尝试播放失败!"));
        return;
    }
    //request timeout in seconds, taken from the player configuration (milliseconds)
    float playTimeOutSec = (*this)[Client::kTimeoutMS].as<int>() / 1000.0;
    setMethod("GET");
    if(!(*this)[kNetAdapter].empty()) {
        setNetAdapter((*this)[kNetAdapter]);
    }
    sendRequest(_m3u8_list.back(), playTimeOutSec);
}
//Stop all timers and the ts downloader, then close the http connection with ex.
void HlsPlayer::teardown_l(const SockException &ex){
    _timer.reset();
    _timer_ts.reset();
    _http_ts_player.reset();
    shutdown(ex);
}

//Public stop entry point.
void HlsPlayer::teardown() {
    teardown_l(SockException(Err_shutdown,"teardown"));
}
/**
 * Download the next pending ts segment.
 * @param force start a new download even if a downloader object still exists
 */
void HlsPlayer::playNextTs(bool force){
    if (_ts_list.empty()) {
        //playlist drained: refresh the m3u8 immediately
        _timer.reset();
        play_l();
        return;
    }
    if (!force && _http_ts_player && _http_ts_player->alive()) {
        //a segment download is still in progress
        return;
    }
    //segment duration in milliseconds
    auto ts_duration = _ts_list.front().duration * 1000;
    weak_ptr<HlsPlayer> weakSelf = dynamic_pointer_cast<HlsPlayer>(shared_from_this());
    std::shared_ptr<Ticker> ticker(new Ticker);
    //split_ts = false: raw body chunks are forwarded as-is
    _http_ts_player = std::make_shared<HttpTSPlayer>(getPoller(), false);
    _http_ts_player->setOnDisconnect([weakSelf, ticker, ts_duration](const SockException &err) {
        auto strongSelf = weakSelf.lock();
        if (!strongSelf) {
            return;
        }
        //pace downloads to roughly real-time: start the next segment
        //500ms before this segment's duration has elapsed
        auto delay = ts_duration - 500 - ticker->elapsedTime();
        if (delay <= 0) {
            //this segment took too long to download: start the next one at once
            strongSelf->playNextTs(true);
        } else {
            //wait out the remaining time before the next segment
            strongSelf->_timer_ts.reset(new Timer(delay / 1000.0, [weakSelf, delay]() {
                auto strongSelf = weakSelf.lock();
                if (!strongSelf) {
                    return false;
                }
                strongSelf->playNextTs(true);
                return false;
            }, strongSelf->getPoller()));
        }
    });
    _http_ts_player->setOnPacket([weakSelf](const char *data, uint64_t len) {
        auto strongSelf = weakSelf.lock();
        if (!strongSelf) {
            return;
        }
        //received a chunk of ts data
        strongSelf->onPacket_l(data, len);
    });
    _http_ts_player->setMethod("GET");
    if(!(*this)[kNetAdapter].empty()) {
        _http_ts_player->setNetAdapter((*this)[Client::kNetAdapter]);
    }
    //request timeout: twice the segment duration, in seconds
    _http_ts_player->sendRequest(_ts_list.front().url, 2 * _ts_list.front().duration);
    _ts_list.pop_front();
}
/**
 * Called after a playlist was parsed successfully.
 * @param is_m3u8_inner true when this is a master playlist of sub-m3u8 urls
 * @param sequence #EXT-X-MEDIA-SEQUENCE of the playlist
 * @param ts_map segments ordered by index, or variants ordered by bandwidth
 */
void HlsPlayer::onParsed(bool is_m3u8_inner,int64_t sequence,const map<int,ts_segment> &ts_map){
    if (!is_m3u8_inner) {
        //this is a ts segment playlist
        if (_last_sequence == sequence) {
            //playlist unchanged: nothing new to queue
            return;
        }
        _last_sequence = sequence;
        for (auto &pr : ts_map) {
            auto &ts = pr.second;
            if (_ts_url_cache.emplace(ts.url).second) {
                //segment not seen before: queue it for download
                _ts_list.emplace_back(ts);
                //remember insertion order so the dedup cache can be bounded
                _ts_url_sort.emplace_back(ts.url);
            }
        }
        if (_ts_url_sort.size() > 2 * ts_map.size()) {
            //dedup cache grew too large: evict the oldest entry
            _ts_url_cache.erase(_ts_url_sort.front());
            _ts_url_sort.pop_front();
        }
        playNextTs();
    } else {
        //master playlist: play the variant with the highest quality
        if (ts_map.empty()) {
            teardown_l(SockException(Err_shutdown, StrPrinter << "empty sub hls list:" + getUrl()));
            return;
        }
        _timer.reset();
        weak_ptr<HlsPlayer> weakSelf = dynamic_pointer_cast<HlsPlayer>(shared_from_this());
        //ts_map is keyed by bandwidth here, so rbegin() is the highest-bandwidth variant
        auto url = ts_map.rbegin()->second.url;
        getPoller()->async([weakSelf, url]() {
            auto strongSelf = weakSelf.lock();
            if (strongSelf) {
                strongSelf->play(url);
            }
        }, false);
    }
}
//Inspect the http response header: accept only 200/206 and sniff the mime type.
int64_t HlsPlayer::onResponseHeader(const string &status, const HttpClient::HttpHeader &headers) {
    bool status_ok = (status == "200" || status == "206");
    if (!status_ok) {
        //unexpected http status: abort playback
        teardown_l(SockException(Err_shutdown, StrPrinter << "bad http status code:" + status));
        return 0;
    }
    auto content_type = const_cast< HttpClient::HttpHeader &>(headers)["Content-Type"];
    //treat the body as an m3u8 playlist when the mime type says so
    _is_m3u8 = (content_type.find("application/vnd.apple.mpegurl") == 0);
    //content length unknown: read until the connection closes
    return -1;
}
//Accumulate the m3u8 body; reset the buffer on the first chunk of a response.
void HlsPlayer::onResponseBody(const char *buf, int64_t size, int64_t recvedSize, int64_t totalSize) {
    bool is_first_chunk = (recvedSize == size);
    if (is_first_chunk) {
        _m3u8.clear();
    }
    _m3u8.append(buf, size);
}
//Whole m3u8 body received: parse it and schedule the next playlist refresh.
void HlsPlayer::onResponseCompleted() {
    if (HlsParser::parse(getUrl(), _m3u8)) {
        playDelay();
        if (_first) {
            //report the initial play result exactly once
            _first = false;
            onPlayResult(SockException(Err_success, "play success"));
        }
    } else {
        teardown_l(SockException(Err_shutdown, "解析m3u8文件失败"));
    }
}

//Seconds to wait before refreshing the playlist: the target duration when known, else 1s.
float HlsPlayer::delaySecond(){
    if (HlsParser::isM3u8() && HlsParser::getTargetDur() > 0) {
        return HlsParser::getTargetDur();
    }
    return 1;
}
//The http connection carrying the m3u8 request was closed (or the request failed).
void HlsPlayer::onDisconnect(const SockException &ex) {
    if (_first) {
        //the very first request failed: report the play failure
        _first = false;
        onPlayResult(ex);
        return;
    }
    //Err_shutdown means this url was abandoned deliberately
    if (ex.getErrCode() == Err_shutdown) {
        if (_m3u8_list.size() <= 1) {
            //no fallback url left: playback is over
            onShutdown(ex);
        } else {
            _m3u8_list.pop_back();
            //retry with the previous url on the fallback stack
            play_l();
        }
        return;
    }
    //transient errors (eof etc.): refresh the m3u8 after a delay
    playDelay();
}
//Follow http redirects by pushing the new url on the fallback stack.
bool HlsPlayer::onRedirectUrl(const string &url,bool temporary) {
    _m3u8_list.emplace_back(url);
    return true;
}

//Refresh the playlist after delaySecond() seconds (one-shot timer).
void HlsPlayer::playDelay(){
    weak_ptr<HlsPlayer> weakSelf = dynamic_pointer_cast<HlsPlayer>(shared_from_this());
    _timer.reset(new Timer(delaySecond(), [weakSelf]() {
        auto strongSelf = weakSelf.lock();
        if (strongSelf) {
            strongSelf->play_l();
        }
        return false;
    }, getPoller()));
}

//Feed downloaded ts bytes into the packet splitter (which calls onPacket()).
void HlsPlayer::onPacket_l(const char *data, uint64_t len){
    _segment.input(data,len);
}
//////////////////////////////////////////////////////////////////////////
HlsPlayerImp::HlsPlayerImp(const EventPoller::Ptr &poller) : PlayerImp<HlsPlayer, PlayerBase>(poller) {
}

//Register an optional callback that receives the raw ts packets.
void HlsPlayerImp::setOnPacket(const TSSegment::onSegment &cb){
    _on_ts = cb;
}

//Hand each ts packet to the user callback (if any) and to the ts demuxer.
void HlsPlayerImp::onPacket(const char *data,uint64_t len) {
    if (_on_ts) {
        _on_ts(data, len);
    }
    if (!_decoder) {
        //lazily create the ts demuxer on the first packet
        _decoder = DecoderImp::createDecoder(DecoderImp::decoder_ts, this);
    }
    if (_decoder) {
        _decoder->input((uint8_t *) data, len);
    }
}
//All demuxed tracks are ready: report play success to the upper layer.
void HlsPlayerImp::onAllTrackReady() {
    PlayerImp<HlsPlayer, PlayerBase>::onPlayResult(SockException(Err_success,"play hls success"));
}

//On success start the 50ms frame-consumption tick; on failure just propagate.
void HlsPlayerImp::onPlayResult(const SockException &ex) {
    if(ex){
        PlayerImp<HlsPlayer, PlayerBase>::onPlayResult(ex);
    }else{
        //align the audio timestamp axis with the video one
        _stamp[TrackAudio].syncTo(_stamp[TrackVideo]);
        _ticker.resetTime();
        weak_ptr<HlsPlayerImp> weakSelf = dynamic_pointer_cast<HlsPlayerImp>(shared_from_this());
        //run onTick() every 50 milliseconds while this object is alive
        _timer = std::make_shared<Timer>(0.05, [weakSelf]() {
            auto strongSelf = weakSelf.lock();
            if (!strongSelf) {
                return false;
            }
            strongSelf->onTick();
            return true;
        }, getPoller());
    }
}
//Playback ended: notify the upper layer and stop the consumption timer.
void HlsPlayerImp::onShutdown(const SockException &ex) {
    PlayerImp<HlsPlayer, PlayerBase>::onShutdown(ex);
    _timer = nullptr;
}

//Delegate track enumeration to the MediaSink base.
vector<Track::Ptr> HlsPlayerImp::getTracks(bool trackReady) const {
    return MediaSink::getTracks(trackReady);
}

//Buffer demuxed frames by relative dts so onTick() can release them smoothly.
void HlsPlayerImp::inputFrame(const Frame::Ptr &frame) {
    //compute a relative timestamp
    int64_t dts, pts;
    _stamp[frame->getTrackType()].revise(frame->dts(), frame->pts(), dts, pts);
    //cache the frame keyed by its dts
    _frame_cache.emplace(dts, Frame::getCacheAbleFrame(frame));
    while (!_frame_cache.empty()) {
        if (_frame_cache.rbegin()->first - _frame_cache.begin()->first > 30 * 1000) {
            //more than 30 seconds buffered: force-consume the oldest frame
            MediaSink::inputFrame(_frame_cache.begin()->second);
            _frame_cache.erase(_frame_cache.begin());
            continue;
        }
        //buffer is below 30 seconds
        break;
    }
}
//Timer callback (every 50ms): release all cached frames whose dts has come due.
void HlsPlayerImp::onTick() {
    auto it = _frame_cache.begin();
    while (it != _frame_cache.end()) {
        if (it->first > _ticker.elapsedTime()) {
            //this frame (and all later ones — the map is dts-ordered) is not due yet
            break;
        }
        //consume the frame that has reached its presentation time
        MediaSink::inputFrame(it->second);
        it = _frame_cache.erase(it);
    }
}
}//namespace mediakit

151
src/Http/HlsPlayer.h Normal file
View File

@ -0,0 +1,151 @@
/*
* Copyright (c) 2020 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifndef HTTP_HLSPLAYER_H
#define HTTP_HLSPLAYER_H
#include <unordered_set>
#include "Util/util.h"
#include "Poller/Timer.h"
#include "Http/HttpDownloader.h"
#include "Player/MediaPlayer.h"
#include "HlsParser.h"
#include "HttpTSPlayer.h"
#include "Rtp/Decoder.h"
#include "Rtp/TSDecoder.h"
using namespace toolkit;
namespace mediakit {
//Hls (m3u8) player: downloads playlists and ts segments, emits raw ts packets.
class HlsPlayer : public HttpClientImp , public PlayerBase , public HlsParser{
public:
    HlsPlayer(const EventPoller::Ptr &poller);
    ~HlsPlayer() override;

    /**
     * Start playing an hls stream.
     * @param strUrl m3u8 url
     */
    void play(const string &strUrl) override;

    /**
     * Stop playback and release timers/downloaders.
     */
    void teardown() override;

protected:
    /**
     * Called for every downloaded ts packet.
     * @param data ts payload
     * @param len payload length
     */
    virtual void onPacket(const char *data, uint64_t len) = 0;

private:
    /**
     * Playlist parsed successfully.
     * @param is_m3u8_inner true for a master playlist of sub-m3u8 variants
     * @param sequence #EXT-X-MEDIA-SEQUENCE value
     * @param ts_map ts segments (keyed by index) or variants (keyed by bandwidth)
     */
    void onParsed(bool is_m3u8_inner,int64_t sequence,const map<int,ts_segment> &ts_map) override;

    /**
     * Http response header received.
     * @param status http status code, e.g. "200"
     * @param headers http headers
     * @return expected content length; -1 means unknown (read until close)
     */
    int64_t onResponseHeader(const string &status,const HttpHeader &headers) override;

    /**
     * Http body chunk received.
     * @param buf chunk data
     * @param size chunk size
     * @param recvedSize bytes received so far (equals size on the first chunk)
     * @param totalSize total size announced by the response header
     */
    void onResponseBody(const char *buf,int64_t size,int64_t recvedSize,int64_t totalSize) override;

    /**
     * Http response fully received.
     */
    void onResponseCompleted() override;

    /**
     * Http connection closed.
     * @param ex disconnect reason
     */
    void onDisconnect(const SockException &ex) override;

    /**
     * Http redirect received.
     * @param url redirect target
     * @param temporary whether the redirect is temporary
     * @return true to follow the redirect
     */
    bool onRedirectUrl(const string &url,bool temporary) override;

private:
    void playDelay();
    float delaySecond();
    void playNextTs(bool force = false);
    void teardown_l(const SockException &ex);
    void play_l();
    void onPacket_l(const char *data, uint64_t len);

private:
    struct UrlComp {
        //compare urls ignoring everything after '?'
        bool operator()(const string& __x, const string& __y) const {
            return split(__x,"?")[0] < split(__y,"?")[0];
        }
    };

private:
    //mime type of the current response indicated m3u8
    bool _is_m3u8 = false;
    //initial play result not reported yet
    bool _first = true;
    //last #EXT-X-MEDIA-SEQUENCE processed
    int64_t _last_sequence = -1;
    //accumulated m3u8 response body
    string _m3u8;
    //playlist refresh timer
    Timer::Ptr _timer;
    //pacing timer between two ts segment downloads
    Timer::Ptr _timer_ts;
    //segments waiting to be downloaded
    list<ts_segment> _ts_list;
    //download order of segment urls, used for cache eviction
    list<string> _ts_url_sort;
    //url fallback stack (original url plus redirects)
    list<string> _m3u8_list;
    //dedup cache of already-queued segment urls
    set<string, UrlComp> _ts_url_cache;
    //downloader of the current ts segment
    HttpTSPlayer::Ptr _http_ts_player;
    //ts packet splitter
    TSSegment _segment;
};
//Hls player with ts demuxing: feeds downloaded ts into DecoderImp and
//paces demuxed frames out through MediaSink on a 50ms tick.
class HlsPlayerImp : public PlayerImp<HlsPlayer, PlayerBase> , public MediaSink{
public:
    typedef std::shared_ptr<HlsPlayerImp> Ptr;
    HlsPlayerImp(const EventPoller::Ptr &poller = nullptr);
    ~HlsPlayerImp() override {};
    //optional callback receiving the raw ts packets before demuxing
    void setOnPacket(const TSSegment::onSegment &cb);

private:
    void onPacket(const char *data, uint64_t len) override;
    void onAllTrackReady() override;
    void onPlayResult(const SockException &ex) override;
    vector<Track::Ptr> getTracks(bool trackReady = true) const override;
    void inputFrame(const Frame::Ptr &frame) override;
    void onShutdown(const SockException &ex) override;
    //periodic consumer of the dts-ordered frame cache
    void onTick();

private:
    TSSegment::onSegment _on_ts;
    //ts demuxer, created lazily on the first packet
    DecoderImp::Ptr _decoder;
    //frames buffered by relative dts awaiting their presentation time
    multimap<int64_t, Frame::Ptr> _frame_cache;
    Timer::Ptr _timer;
    Ticker _ticker;
    //timestamp normalizers, one per track type (audio/video)
    Stamp _stamp[2];
};
}//namespace mediakit
#endif //HTTP_HLSPLAYER_H

View File

@ -48,8 +48,7 @@ public:
}
};
class HttpClient : public TcpClient , public HttpRequestSplitter
{
class HttpClient : public TcpClient , public HttpRequestSplitter{
public:
typedef StrCaseMap HttpHeader;
typedef std::shared_ptr<HttpClient> Ptr;

View File

@ -13,9 +13,7 @@
#include "HttpClient.h"
#include "Util/SSLBox.h"
using namespace toolkit;
namespace mediakit {
class HttpClientImp: public TcpClientWithSSL<HttpClient> {
@ -28,5 +26,4 @@ protected:
};
} /* namespace mediakit */
#endif /* SRC_HTTP_HTTPCLIENTIMP_H_ */

View File

@ -188,65 +188,65 @@ bool HttpSession::checkLiveFlvStream(const function<void()> &cb){
bool bClose = !strcasecmp(_parser["Connection"].data(),"close");
weak_ptr<HttpSession> weakSelf = dynamic_pointer_cast<HttpSession>(shared_from_this());
MediaSource::findAsync(_mediaInfo,weakSelf.lock(),[weakSelf,bClose,this,cb](const MediaSource::Ptr &src){
//鉴权结果回调
auto onRes = [cb, weakSelf, bClose](const string &err){
auto strongSelf = weakSelf.lock();
if(!strongSelf){
if (!strongSelf) {
//本对象已经销毁
return;
}
auto rtmp_src = dynamic_pointer_cast<RtmpMediaSource>(src);
if(!rtmp_src){
//未找到该流
sendNotFound(bClose);
if(!err.empty()){
//播放鉴权失败
strongSelf->sendResponse("401 Unauthorized", bClose, nullptr, KeyValue(), std::make_shared<HttpStringBody>(err));
return;
}
//找到流了
auto onRes = [this,rtmp_src,cb](const string &err){
bool authSuccess = err.empty();
if(!authSuccess){
sendResponse("401 Unauthorized", true, nullptr, KeyValue(), std::make_shared<HttpStringBody>(err));
return ;
//异步查找rtmp流
MediaSource::findAsync(strongSelf->_mediaInfo, strongSelf, [weakSelf, bClose, cb](const MediaSource::Ptr &src) {
auto strongSelf = weakSelf.lock();
if (!strongSelf) {
//本对象已经销毁
return;
}
auto rtmp_src = dynamic_pointer_cast<RtmpMediaSource>(src);
if (!rtmp_src) {
//未找到该流
strongSelf->sendNotFound(bClose);
return;
}
if(!cb) {
if (!cb) {
//找到rtmp源发送http头负载后续发送
sendResponse("200 OK", false, "video/x-flv",KeyValue(),nullptr,true);
}else{
strongSelf->sendResponse("200 OK", false, "video/x-flv", KeyValue(), nullptr, true);
} else {
//自定义发送http头
cb();
}
//http-flv直播牺牲延时提升发送性能
setSocketFlags();
strongSelf->setSocketFlags();
strongSelf->start(strongSelf->getPoller(), rtmp_src);
strongSelf->_is_flv_stream = true;
});
};
try{
start(getPoller(),rtmp_src);
_is_flv_stream = true;
}catch (std::exception &ex){
//该rtmp源不存在
shutdown(SockException(Err_shutdown,"rtmp mediasource released"));
}
};
weak_ptr<HttpSession> weakSelf = dynamic_pointer_cast<HttpSession>(shared_from_this());
Broadcast::AuthInvoker invoker = [weakSelf,onRes](const string &err){
auto strongSelf = weakSelf.lock();
if(!strongSelf){
return;
}
strongSelf->async([weakSelf,onRes,err](){
auto strongSelf = weakSelf.lock();
if(!strongSelf){
return;
}
onRes(err);
});
};
auto flag = NoticeCenter::Instance().emitEvent(Broadcast::kBroadcastMediaPlayed,_mediaInfo,invoker,static_cast<SockInfo &>(*this));
if(!flag){
//该事件无人监听,默认不鉴权
onRes("");
Broadcast::AuthInvoker invoker = [weakSelf, onRes](const string &err) {
auto strongSelf = weakSelf.lock();
if (!strongSelf) {
return;
}
});
strongSelf->async([onRes, err]() {
onRes(err);
});
};
auto flag = NoticeCenter::Instance().emitEvent(Broadcast::kBroadcastMediaPlayed, _mediaInfo, invoker, static_cast<SockInfo &>(*this));
if (!flag) {
//该事件无人监听,默认不鉴权
onRes("");
}
return true;
}

81
src/Http/HttpTSPlayer.cpp Normal file
View File

@ -0,0 +1,81 @@
/*
* Copyright (c) 2020 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include "HttpTSPlayer.h"
namespace mediakit {
//split_ts: when true the response body is re-chunked into ts packets via TSSegment
HttpTSPlayer::HttpTSPlayer(const EventPoller::Ptr &poller, bool split_ts){
    _segment.setOnSegment([this](const char *data, uint64_t len) { onPacket(data, len); });
    _poller = poller ? poller : EventPollerPool::Instance().getPoller();
    _split_ts = split_ts;
}

HttpTSPlayer::~HttpTSPlayer() {}

//Validate the response status and record whether the mime type is mpeg-ts.
int64_t HttpTSPlayer::onResponseHeader(const string &status, const HttpClient::HttpHeader &headers) {
    if (status != "200" && status != "206") {
        //unexpected http status code
        shutdown(SockException(Err_other, StrPrinter << "bad http status code:" + status));
        return 0;
    }
    auto contet_type = const_cast< HttpClient::HttpHeader &>(headers)["Content-Type"];
    if (contet_type.find("video/mp2t") == 0 || contet_type.find("video/mpeg") == 0) {
        _is_ts_content = true;
    }
    //content of unknown length follows (read until close)
    return -1;
}
/**
 * Http body chunk received.
 * @param buf chunk data
 * @param size chunk size
 * @param recvedSize bytes received so far (equals size on the first chunk)
 * @param totalSize total size announced by the response header
 */
void HttpTSPlayer::onResponseBody(const char *buf, int64_t size, int64_t recvedSize, int64_t totalSize) {
    //fix: guard size > 0 before dereferencing buf[0] — an empty first chunk
    //previously read one byte past a zero-length buffer
    if (recvedSize == size && size > 0) {
        //first chunk of the response: sniff for the ts sync byte
        if (buf[0] == TS_SYNC_BYTE) {
            //looks like a ts stream
            _is_first_packet_ts = true;
        } else {
            WarnL << "可能不是http-ts流";
        }
    }
    if (_split_ts) {
        //re-chunk into aligned ts packets
        _segment.input(buf, size);
    } else {
        //forward raw body chunks as-is
        onPacket(buf, size);
    }
}
//Whole body received: report completion through the disconnect path.
void HttpTSPlayer::onResponseCompleted() {
    shutdown(SockException(Err_success, "play completed"));
}

//Deliver the disconnect reason to the user callback exactly once.
void HttpTSPlayer::onDisconnect(const SockException &ex) {
    if (_on_disconnect) {
        _on_disconnect(ex);
        _on_disconnect = nullptr;
    }
}

//Forward a ts packet to the registered callback, if any.
void HttpTSPlayer::onPacket(const char *data, uint64_t len) {
    if (_on_segment) {
        _on_segment(data, len);
    }
}

void HttpTSPlayer::setOnDisconnect(const HttpTSPlayer::onShutdown &cb) {
    _on_disconnect = cb;
}

void HttpTSPlayer::setOnPacket(const TSSegment::onSegment &cb) {
    _on_segment = cb;
}
}//namespace mediakit

57
src/Http/HttpTSPlayer.h Normal file
View File

@ -0,0 +1,57 @@
/*
* Copyright (c) 2020 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifndef HTTP_HTTPTSPLAYER_H
#define HTTP_HTTPTSPLAYER_H
#include "Http/HttpDownloader.h"
#include "Player/MediaPlayer.h"
#include "Rtp/TSDecoder.h"
using namespace toolkit;
namespace mediakit {
//http-ts播发器未实现ts解复用
//http-ts player: downloads an http ts stream (no ts demuxing is performed here)
class HttpTSPlayer : public HttpClientImp{
public:
    typedef function<void(const SockException &)> onShutdown;
    typedef std::shared_ptr<HttpTSPlayer> Ptr;

    //split_ts: re-chunk the body into ts packets via TSSegment
    //(packet size presumably 188 bytes — confirm in TSDecoder.h)
    HttpTSPlayer(const EventPoller::Ptr &poller = nullptr, bool split_ts = true);
    ~HttpTSPlayer() override ;

    //set the callback fired when the connection ends (error or completion)
    void setOnDisconnect(const onShutdown &cb);
    //set the callback receiving ts packets
    void setOnPacket(const TSSegment::onSegment &cb);

protected:
    ///HttpClient override///
    int64_t onResponseHeader(const string &status,const HttpHeader &headers) override;
    void onResponseBody(const char *buf,int64_t size,int64_t recvedSize,int64_t totalSize) override;
    void onResponseCompleted() override;
    void onDisconnect(const SockException &ex) override ;

    //called for every received ts packet (or raw chunk when split_ts is false)
    virtual void onPacket(const char *data, uint64_t len);

private:
    //whether the Content-Type indicated an mpeg-ts payload
    bool _is_ts_content = false;
    //whether the first received byte was a ts sync byte
    bool _is_first_packet_ts = false;
    //whether to split the body into ts packets
    bool _split_ts;
    TSSegment _segment;
    onShutdown _on_disconnect;
    TSSegment::onSegment _on_segment;
};
}//namespace mediakit
#endif //HTTP_HTTPTSPLAYER_H

View File

@ -94,6 +94,20 @@ public:
_onRecv = nullptr;
sendRequest(http_url,fTimeOutSec);
}
// Send a websocket CLOSE frame to shut the session down gracefully.
// No-op when the handshake has not completed (_onRecv not yet installed).
void closeWsClient(){
    if(!_onRecv){
        // Not connected / handshake not finished yet.
        return;
    }
    WebSocketHeader header;
    header._fin = true;
    header._reserved = 0;
    header._opcode = CLOSE;
    // Client-to-server frames must be masked (RFC 6455 §5.3).
    header._mask_flag = true;
    WebSocketSplitter::encode(header, nullptr);
}
protected:
//HttpClientImp override
@ -110,7 +124,8 @@ protected:
if(Sec_WebSocket_Accept == const_cast<HttpHeader &>(headers)["Sec-WebSocket-Accept"]){
//success
onWebSocketException(SockException());
return 0;
//后续全是websocket负载数据
return -1;
}
shutdown(SockException(Err_shutdown,StrPrinter << "Sec-WebSocket-Accept mismatch"));
return 0;
@ -125,6 +140,16 @@ protected:
*/
void onResponseCompleted() override {}
/**
* websocket负载数据
*/
void onResponseBody(const char *buf,int64_t size,int64_t recvedSize,int64_t totalSize) override{
if(_onRecv){
//完成websocket握手后拦截websocket数据并解析
_onRecv(buf, size);
}
};
//TcpClient override
/**
@ -167,20 +192,6 @@ protected:
HttpClientImp::onConnect(ex);
}
/**
* tcp收到数据
* @param pBuf
*/
void onRecv(const Buffer::Ptr &pBuf) override{
if(_onRecv){
//完成websocket握手后拦截websocket数据并解析
_onRecv(pBuf);
}else{
//websocket握手数据
HttpClientImp::onRecv(pBuf);
}
}
/**
* tcp连接断开
* @param ex
@ -193,7 +204,7 @@ protected:
//WebSocketSplitter override
/**
* webSocket数据包包头onWebSocketDecodePlayload回调
* webSocket数据包包头onWebSocketDecodePayload回调
* @param header
*/
void onWebSocketDecodeHeader(const WebSocketHeader &header) override{
@ -205,9 +216,9 @@ protected:
* @param header
* @param ptr
* @param len
* @param recved ()header._playload_len时则接受完毕
* @param recved ()header._payload_len时则接受完毕
*/
void onWebSocketDecodePlayload(const WebSocketHeader &header, const uint8_t *ptr, uint64_t len, uint64_t recved) override{
void onWebSocketDecodePayload(const WebSocketHeader &header, const uint8_t *ptr, uint64_t len, uint64_t recved) override{
_payload.append((char *)ptr,len);
}
@ -285,9 +296,9 @@ private:
//触发连接成功事件
_delegate.onConnect(ex);
//拦截websocket数据接收
_onRecv = [this](const Buffer::Ptr &pBuf){
_onRecv = [this](const char *data, int len){
//解析websocket数据包
this->WebSocketSplitter::decode((uint8_t*)pBuf->data(),pBuf->size());
this->WebSocketSplitter::decode((uint8_t *)data, len);
};
return;
}
@ -306,7 +317,7 @@ private:
private:
string _Sec_WebSocket_Key;
function<void(const Buffer::Ptr &pBuf)> _onRecv;
function<void(const char *data, int len)> _onRecv;
ClientTypeImp<ClientType,DataType> &_delegate;
string _payload;
};
@ -328,7 +339,9 @@ public:
WebSocketClient(ArgsType &&...args) : ClientTypeImp<ClientType,DataType>(std::forward<ArgsType>(args)...){
_wsClient.reset(new HttpWsClient<ClientType,DataType>(*this));
}
~WebSocketClient() override {}
~WebSocketClient() override {
_wsClient->closeWsClient();
}
/**
* startConnect方法

View File

@ -161,7 +161,7 @@ protected:
* @param len
* @param recved
*/
void onWebSocketDecodePlayload(const WebSocketHeader &packet,const uint8_t *ptr,uint64_t len,uint64_t recved) override {
void onWebSocketDecodePayload(const WebSocketHeader &packet,const uint8_t *ptr,uint64_t len,uint64_t recved) override {
_remian_data.append((char *)ptr,len);
}
@ -205,7 +205,7 @@ protected:
* @param buffer
*/
void onWebSocketEncodeData(const Buffer::Ptr &buffer) override{
SocketHelper::send(buffer);
HttpSessionType::send(buffer);
}
private:
string _remian_data;

View File

@ -72,16 +72,16 @@ begin_decode:
CHECK_LEN(1);
_mask_flag = (*ptr & 0x80) >> 7;
_playload_len = (*ptr & 0x7F);
_payload_len = (*ptr & 0x7F);
ptr += 1;
if (_playload_len == 126) {
if (_payload_len == 126) {
CHECK_LEN(2);
_playload_len = (*ptr << 8) | *(ptr + 1);
_payload_len = (*ptr << 8) | *(ptr + 1);
ptr += 2;
} else if (_playload_len == 127) {
} else if (_payload_len == 127) {
CHECK_LEN(8);
_playload_len = ((uint64_t) ptr[0] << (8 * 7)) |
_payload_len = ((uint64_t) ptr[0] << (8 * 7)) |
((uint64_t) ptr[1] << (8 * 6)) |
((uint64_t) ptr[2] << (8 * 5)) |
((uint64_t) ptr[3] << (8 * 4)) |
@ -98,9 +98,9 @@ begin_decode:
}
_got_header = true;
_mask_offset = 0;
_playload_offset = 0;
_payload_offset = 0;
onWebSocketDecodeHeader(*this);
if(_playload_len == 0){
if(_payload_len == 0){
onWebSocketDecodeComplete(*this);
}
}
@ -109,19 +109,19 @@ begin_decode:
uint64_t remain = len - (ptr - data);
if(remain > 0){
uint64_t playload_slice_len = remain;
if(playload_slice_len + _playload_offset > _playload_len){
playload_slice_len = _playload_len - _playload_offset;
uint64_t payload_slice_len = remain;
if(payload_slice_len + _payload_offset > _payload_len){
payload_slice_len = _payload_len - _payload_offset;
}
_playload_offset += playload_slice_len;
onPlayloadData(ptr,playload_slice_len);
_payload_offset += payload_slice_len;
onPayloadData(ptr, payload_slice_len);
if(_playload_offset == _playload_len){
if(_payload_offset == _payload_len){
onWebSocketDecodeComplete(*this);
//这是下一个包
remain -= playload_slice_len;
ptr += playload_slice_len;
remain -= payload_slice_len;
ptr += payload_slice_len;
_got_header = false;
if(remain > 0){
@ -138,14 +138,14 @@ begin_decode:
_remain_data.clear();
}
void WebSocketSplitter::onPlayloadData(uint8_t *ptr, uint64_t len) {
void WebSocketSplitter::onPayloadData(uint8_t *data, uint64_t len) {
if(_mask_flag){
for(int i = 0; i < len ; ++i,++ptr){
*(ptr) ^= _mask[(i + _mask_offset) % 4];
for(int i = 0; i < len ; ++i,++data){
*(data) ^= _mask[(i + _mask_offset) % 4];
}
_mask_offset = (_mask_offset + len) % 4;
}
onWebSocketDecodePlayload(*this, _mask_flag ? ptr - len : ptr, len, _playload_offset);
onWebSocketDecodePayload(*this, _mask_flag ? data - len : data, len, _payload_offset);
}
void WebSocketSplitter::encode(const WebSocketHeader &header,const Buffer::Ptr &buffer) {

View File

@ -44,14 +44,19 @@ public:
CONTROL_RSVF = 0xF
} Type;
public:
WebSocketHeader() : _mask(4){}
WebSocketHeader() : _mask(4){
//获取_mask内部buffer的内存地址该内存是malloc开辟的地址为随机
uint64_t ptr = (uint64_t)(&_mask[0]);
//根据内存地址设置掩码随机数
_mask.assign((uint8_t*)(&ptr), (uint8_t*)(&ptr) + 4);
}
virtual ~WebSocketHeader(){}
public:
bool _fin;
uint8_t _reserved;
Type _opcode;
bool _mask_flag;
uint64_t _playload_len;
uint64_t _payload_len;
vector<uint8_t > _mask;
};
@ -62,7 +67,7 @@ public:
/**
* 便webSocket数据以及处理粘包问题
* onWebSocketDecodeHeader和onWebSocketDecodePlayload回调
* onWebSocketDecodeHeader和onWebSocketDecodePayload回调
* @param data
* @param len
*/
@ -77,7 +82,7 @@ public:
void encode(const WebSocketHeader &header,const Buffer::Ptr &buffer);
protected:
/**
* webSocket数据包包头onWebSocketDecodePlayload回调
* webSocket数据包包头onWebSocketDecodePayload回调
* @param header
*/
virtual void onWebSocketDecodeHeader(const WebSocketHeader &header) {};
@ -87,9 +92,9 @@ protected:
* @param header
* @param ptr
* @param len
* @param recved ()header._playload_len时则接受完毕
* @param recved ()header._payload_len时则接受完毕
*/
virtual void onWebSocketDecodePlayload(const WebSocketHeader &header, const uint8_t *ptr, uint64_t len, uint64_t recved) {};
virtual void onWebSocketDecodePayload(const WebSocketHeader &header, const uint8_t *ptr, uint64_t len, uint64_t recved) {};
/**
@ -105,12 +110,12 @@ protected:
*/
virtual void onWebSocketEncodeData(const Buffer::Ptr &buffer){};
private:
void onPlayloadData(uint8_t *data,uint64_t len);
void onPayloadData(uint8_t *data, uint64_t len);
private:
string _remain_data;
int _mask_offset = 0;
bool _got_header = false;
uint64_t _playload_offset = 0;
uint64_t _payload_offset = 0;
};
} /* namespace mediakit */

View File

@ -12,18 +12,31 @@
#include "PlayerBase.h"
#include "Rtsp/RtspPlayerImp.h"
#include "Rtmp/RtmpPlayerImp.h"
#include "Http/HlsPlayer.h"
using namespace toolkit;
namespace mediakit {
PlayerBase::Ptr PlayerBase::createPlayer(const EventPoller::Ptr &poller,const string &strUrl) {
//字符串是否以xx结尾
static bool end_of(const string &str, const string &substr){
auto pos = str.rfind(substr);
return pos != string::npos && pos == str.size() - substr.size();
}
PlayerBase::Ptr PlayerBase::createPlayer(const EventPoller::Ptr &poller,const string &url_in) {
static auto releasePlayer = [](PlayerBase *ptr){
onceToken token(nullptr,[&](){
delete ptr;
});
ptr->teardown();
};
string prefix = FindField(strUrl.data(), NULL, "://");
string url = url_in;
string prefix = FindField(url.data(), NULL, "://");
auto pos = url.find('?');
if (pos != string::npos) {
//去除?后面的字符串
url = url.substr(0, pos);
}
if (strcasecmp("rtsps",prefix.data()) == 0) {
return PlayerBase::Ptr(new TcpClientWithSSL<RtspPlayerImp>(poller),releasePlayer);
@ -41,6 +54,10 @@ PlayerBase::Ptr PlayerBase::createPlayer(const EventPoller::Ptr &poller,const st
return PlayerBase::Ptr(new RtmpPlayerImp(poller),releasePlayer);
}
if ((strcasecmp("http",prefix.data()) == 0 || strcasecmp("https",prefix.data()) == 0) && end_of(url, ".m3u8")) {
return PlayerBase::Ptr(new HlsPlayerImp(poller),releasePlayer);
}
return PlayerBase::Ptr(new RtspPlayerImp(poller),releasePlayer);
}

View File

@ -13,8 +13,6 @@ namespace mediakit {
HlsMaker::HlsMaker(float seg_duration, uint32_t seg_number) {
//最小允许设置为00个切片代表点播
seg_number = MAX(0,seg_number);
seg_duration = MAX(1,seg_duration);
_seg_number = seg_number;
_seg_duration = seg_duration;
}
@ -34,6 +32,8 @@ void HlsMaker::makeIndexFile(bool eof) {
}
}
auto sequence = _seg_number ? (_file_index > _seg_number ? _file_index - _seg_number : 0LL) : 0LL;
string m3u8;
snprintf(file_content,sizeof(file_content),
"#EXTM3U\n"
@ -42,7 +42,7 @@ void HlsMaker::makeIndexFile(bool eof) {
"#EXT-X-TARGETDURATION:%u\n"
"#EXT-X-MEDIA-SEQUENCE:%llu\n",
(maxSegmentDuration + 999) / 1000,
_seg_number ? _file_index : 0);
sequence);
m3u8.assign(file_content);

View File

@ -1,4 +1,4 @@
/*
/*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).

View File

@ -1,4 +1,4 @@
/*
/*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).

View File

@ -1,4 +1,4 @@
/*
/*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).

View File

@ -1,4 +1,4 @@
/*
/*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).

View File

@ -122,27 +122,68 @@ void MP4Muxer::inputFrame(const Frame::Ptr &frame) {
}
}
static uint8_t getObject(CodecId codecId){
switch (codecId){
case CodecG711A : return MOV_OBJECT_G711a;
case CodecG711U : return MOV_OBJECT_G711u;
case CodecOpus : return MOV_OBJECT_OPUS;
case CodecAAC : return MOV_OBJECT_AAC;
case CodecH264 : return MOV_OBJECT_H264;
case CodecH265 : return MOV_OBJECT_HEVC;
default : return 0;
}
}
void MP4Muxer::stampSync(){
if(_codec_to_trackid.size() < 2){
return;
}
Stamp *audio = nullptr, *video = nullptr;
for(auto &pr : _codec_to_trackid){
switch (getTrackType((CodecId) pr.first)){
case TrackAudio : audio = &pr.second.stamp; break;
case TrackVideo : video = &pr.second.stamp; break;
default : break;
}
}
if(audio && video){
//音频时间戳同步于视频,因为音频时间戳被修改后不影响播放
audio->syncTo(*video);
}
}
void MP4Muxer::addTrack(const Track::Ptr &track) {
auto mp4_object = getObject(track->getCodecId());
if (!mp4_object) {
WarnL << "MP4录制不支持该编码格式:" << track->getCodecName();
return;
}
if (!track->ready()) {
WarnL << "Track[" << track->getCodecName() << "]未就绪";
return;
}
switch (track->getCodecId()) {
case CodecG711A:
case CodecG711U: {
auto audio_track = dynamic_pointer_cast<G711Track>(track);
case CodecG711U:
case CodecOpus: {
auto audio_track = dynamic_pointer_cast<AudioTrack>(track);
if (!audio_track) {
WarnL << "不是G711 Track";
return;
}
if (!audio_track->ready()) {
WarnL << "G711 Track未就绪";
WarnL << "不是音频Track:" << track->getCodecName();
return;
}
auto track_id = mov_writer_add_audio(_mov_writter.get(),
track->getCodecId() == CodecG711A ? MOV_OBJECT_G711a : MOV_OBJECT_G711u,
mp4_object,
audio_track->getAudioChannel(),
audio_track->getAudioSampleBit() * audio_track->getAudioChannel(),
audio_track->getAudioSampleRate(),
nullptr, 0);
if (track_id < 0) {
WarnL << "添加G711 Track失败:" << track_id;
WarnL << "添加Track[" << track->getCodecName() << "]失败:" << track_id;
return;
}
_codec_to_trackid[track->getCodecId()].track_id = track_id;
@ -155,16 +196,14 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
WarnL << "不是AAC Track";
return;
}
if(!audio_track->ready()){
WarnL << "AAC Track未就绪";
return;
}
auto track_id = mov_writer_add_audio(_mov_writter.get(),
MOV_OBJECT_AAC,
mp4_object,
audio_track->getAudioChannel(),
audio_track->getAudioSampleBit() * audio_track->getAudioChannel(),
audio_track->getAudioSampleRate(),
audio_track->getAacCfg().data(), 2);
audio_track->getAacCfg().data(),
audio_track->getAacCfg().size());
if(track_id < 0){
WarnL << "添加AAC Track失败:" << track_id;
return;
@ -178,10 +217,6 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
WarnL << "不是H264 Track";
return;
}
if(!h264_track->ready()){
WarnL << "H264 Track未就绪";
return;
}
struct mpeg4_avc_t avc = {0};
string sps_pps = string("\x00\x00\x00\x01", 4) + h264_track->getSps() +
@ -196,7 +231,7 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
}
auto track_id = mov_writer_add_video(_mov_writter.get(),
MOV_OBJECT_H264,
mp4_object,
h264_track->getVideoWidth(),
h264_track->getVideoHeight(),
extra_data,
@ -216,10 +251,6 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
WarnL << "不是H265 Track";
return;
}
if(!h265_track->ready()){
WarnL << "H265 Track未就绪";
return;
}
struct mpeg4_hevc_t hevc = {0};
string vps_sps_pps = string("\x00\x00\x00\x01", 4) + h265_track->getVps() +
@ -235,7 +266,7 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
}
auto track_id = mov_writer_add_video(_mov_writter.get(),
MOV_OBJECT_HEVC,
mp4_object,
h265_track->getVideoWidth(),
h265_track->getVideoHeight(),
extra_data,
@ -248,10 +279,12 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
_have_video = true;
}
break;
default:
WarnL << "MP4录制不支持该编码格式:" << track->getCodecName();
break;
default: WarnL << "MP4录制不支持该编码格式:" << track->getCodecName(); break;
}
//尝试音视频同步
stampSync();
}
}//namespace mediakit

View File

@ -45,13 +45,14 @@ public:
private:
void openMP4();
void closeMP4();
void stampSync();
private:
struct track_info{
struct track_info {
int track_id = -1;
Stamp stamp;
};
unordered_map<int,track_info> _codec_to_trackid;
unordered_map<int, track_info> _codec_to_trackid;
List<Frame::Ptr> _frameCached;
bool _started = false;
bool _have_video = false;

View File

@ -80,11 +80,11 @@ std::shared_ptr<MediaSinkInterface> Recorder::createRecorder(type type, const st
}
static MediaSource::Ptr getMediaSource(const string &vhost, const string &app, const string &stream_id){
auto src = MediaSource::find(RTMP_SCHEMA, vhost, app, stream_id, false);
auto src = MediaSource::find(RTMP_SCHEMA, vhost, app, stream_id);
if(src){
return src;
}
return MediaSource::find(RTSP_SCHEMA, vhost, app, stream_id, false);
return MediaSource::find(RTSP_SCHEMA, vhost, app, stream_id);
}
bool Recorder::isRecording(type type, const string &vhost, const string &app, const string &stream_id){

View File

@ -23,6 +23,26 @@ TsMuxer::~TsMuxer() {
uninit();
}
void TsMuxer::stampSync(){
if(_codec_to_trackid.size() < 2){
return;
}
Stamp *audio = nullptr, *video = nullptr;
for(auto &pr : _codec_to_trackid){
switch (getTrackType((CodecId) pr.first)){
case TrackAudio : audio = &pr.second.stamp; break;
case TrackVideo : video = &pr.second.stamp; break;
default : break;
}
}
if(audio && video){
//音频时间戳同步于视频,因为音频时间戳被修改后不影响播放
audio->syncTo(*video);
}
}
void TsMuxer::addTrack(const Track::Ptr &track) {
switch (track->getCodecId()) {
case CodecH264: {
@ -52,9 +72,11 @@ void TsMuxer::addTrack(const Track::Ptr &track) {
break;
}
default:
break;
default: WarnL << "mpeg-ts 不支持该编码格式,已忽略:" << track->getCodecName(); break;
}
//尝试音视频同步
stampSync();
}
void TsMuxer::inputFrame(const Frame::Ptr &frame) {

View File

@ -17,37 +17,59 @@
#include "Util/File.h"
#include "Common/MediaSink.h"
#include "Common/Stamp.h"
using namespace toolkit;
namespace mediakit {
//该类用于产生MPEG-TS
class TsMuxer : public MediaSinkInterface {
public:
TsMuxer();
virtual ~TsMuxer();
/**
*
*/
void addTrack(const Track::Ptr &track) override;
/**
*
*/
void resetTracks() override;
/**
*
*/
void inputFrame(const Frame::Ptr &frame) override;
protected:
/**
* mpegts数据回调
* @param packet mpegts数据
* @param bytes mpegts数据长度
* @param timestamp
* @param is_idr_fast_packet TS包ts切片第一帧为关键帧
*/
virtual void onTs(const void *packet, int bytes,uint32_t timestamp,bool is_idr_fast_packet) = 0;
private:
void init();
void uninit();
private:
void *_context = nullptr;
char *_tsbuf[188];
uint32_t _timestamp = 0;
//音视频时间戳同步用
void stampSync();
struct track_info{
private:
void *_context = nullptr;
char _tsbuf[188];
uint32_t _timestamp = 0;
struct track_info {
int track_id = -1;
Stamp stamp;
};
unordered_map<int,track_info> _codec_to_trackid;
unordered_map<int, track_info> _codec_to_trackid;
List<Frame::Ptr> _frameCached;
bool _is_idr_fast_packet = false;
bool _have_video = false;
};
}//namespace mediakit
#endif //TSMUXER_H
#endif //TSMUXER_H

View File

@ -50,6 +50,9 @@ void FlvMuxer::start(const EventPoller::Ptr &poller,const RtmpMediaSource::Ptr &
}
strongSelf->onDetach();
});
//音频同步于视频
_stamp[0].syncTo(_stamp[1]);
_ring_reader->setReadCB([weakSelf](const RtmpMediaSource::RingDataType &pkt){
auto strongSelf = weakSelf.lock();
if(!strongSelf){
@ -164,7 +167,7 @@ void FlvMuxer::stop() {
///////////////////////////////////////////////////////FlvRecorder/////////////////////////////////////////////////////
void FlvRecorder::startRecord(const EventPoller::Ptr &poller,const string &vhost, const string &app, const string &stream,const string &file_path) {
startRecord(poller,dynamic_pointer_cast<RtmpMediaSource>(MediaSource::find(RTMP_SCHEMA,vhost,app,stream,false)),file_path);
startRecord(poller,dynamic_pointer_cast<RtmpMediaSource>(MediaSource::find(RTMP_SCHEMA,vhost,app,stream)),file_path);
}
void FlvRecorder::startRecord(const EventPoller::Ptr &poller,const RtmpMediaSource::Ptr &media, const string &file_path) {

View File

@ -100,4 +100,23 @@ uint8_t getAudioRtmpFlags(const Track::Ptr &track){
}
void Metadata::addTrack(AMFValue &metadata, const Track::Ptr &track) {
Metadata::Ptr new_metadata;
switch (track->getTrackType()) {
case TrackVideo: {
new_metadata = std::make_shared<VideoMeta>(dynamic_pointer_cast<VideoTrack>(track));
}
break;
case TrackAudio: {
new_metadata = std::make_shared<AudioMeta>(dynamic_pointer_cast<AudioTrack>(track));
}
break;
default:
return;
}
new_metadata->getMetadata().object_for_each([&](const std::string &key, const AMFValue &value) {
metadata.set(key, value);
});
}
}//namespace mediakit

View File

@ -220,6 +220,8 @@ public:
const AMFValue &getMetadata() const{
return _metadata;
}
static void addTrack(AMFValue &metadata, const Track::Ptr &track);
protected:
AMFValue _metadata;
};
@ -242,18 +244,6 @@ public:
}
}
/**
*
* @return
*/
TrackType getTrackType() const override {
return TrackTitle;
}
/**
* id
* @return
*/
CodecId getCodecId() const override{
return CodecInvalid;
}
@ -266,18 +256,6 @@ public:
VideoMeta(const VideoTrack::Ptr &video,int datarate = 5000);
virtual ~VideoMeta(){}
/**
*
* @return
*/
TrackType getTrackType() const override {
return TrackVideo;
}
/**
* id
* @return
*/
CodecId getCodecId() const override{
return _codecId;
}
@ -285,7 +263,6 @@ private:
CodecId _codecId;
};
class AudioMeta : public Metadata{
public:
typedef std::shared_ptr<AudioMeta> Ptr;
@ -294,18 +271,6 @@ public:
virtual ~AudioMeta(){}
/**
*
* @return
*/
TrackType getTrackType() const override {
return TrackAudio;
}
/**
* id
* @return
*/
CodecId getCodecId() const override{
return _codecId;
}
@ -317,7 +282,4 @@ private:
uint8_t getAudioRtmpFlags(const Track::Ptr &track);
}//namespace mediakit
#endif
#endif//__rtmp_h

View File

@ -13,60 +13,56 @@
namespace mediakit {
void RtmpDemuxer::loadMetaData(const AMFValue &val){
bool RtmpDemuxer::loadMetaData(const AMFValue &val){
bool ret = false;
try {
int audiosamplerate = 0;
int audiochannels = 0;
int audiosamplesize = 0;
const AMFValue *audiocodecid = nullptr;
const AMFValue *videocodecid = nullptr;
val.object_for_each([&](const string &key, const AMFValue &val) {
if (key == "duration") {
_fDuration = val.as_number();
return;
}
if(key == "audiosamplerate"){
if (key == "audiosamplerate") {
audiosamplerate = val.as_integer();
return;
}
if(key == "audiosamplesize"){
if (key == "audiosamplesize") {
audiosamplesize = val.as_integer();
return;
}
if(key == "stereo"){
if (key == "stereo") {
audiochannels = val.as_boolean() ? 2 : 1;
return;
}
if(key == "videocodecid"){
if (key == "videocodecid") {
//找到视频
videocodecid = &val;
return;
}
if(key == "audiocodecid"){
if (key == "audiocodecid") {
//找到音频
audiocodecid = &val;
return;
}
});
if(videocodecid){
if (videocodecid) {
//有视频
ret = true;
makeVideoTrack(*videocodecid);
}
if(audiocodecid){
if (audiocodecid) {
//有音频
ret = true;
makeAudioTrack(*audiocodecid, audiosamplerate, audiochannels, audiosamplesize);
}
}catch (std::exception &ex){
} catch (std::exception &ex) {
WarnL << ex.what();
}
return ret;
}
bool RtmpDemuxer::inputRtmp(const RtmpPacket::Ptr &pkt) {
@ -105,12 +101,11 @@ void RtmpDemuxer::makeVideoTrack(const AMFValue &videoCodec) {
_videoTrack = dynamic_pointer_cast<VideoTrack>(Factory::getVideoTrackByAmf(videoCodec));
if (_videoTrack) {
//生成rtmpCodec对象以便解码rtmp
_videoRtmpDecoder = Factory::getRtmpCodecByTrack(_videoTrack);
_videoRtmpDecoder = Factory::getRtmpCodecByTrack(_videoTrack, false);
if (_videoRtmpDecoder) {
//设置rtmp解码器代理生成的frame写入该Track
_videoRtmpDecoder->addDelegate(_videoTrack);
onAddTrack(_videoTrack);
_tryedGetVideoTrack = true;
} else {
//找不到相应的rtmp解码器该track无效
_videoTrack.reset();
@ -123,12 +118,11 @@ void RtmpDemuxer::makeAudioTrack(const AMFValue &audioCodec,int sample_rate, int
_audioTrack = dynamic_pointer_cast<AudioTrack>(Factory::getAudioTrackByAmf(audioCodec, sample_rate, channels, sample_bit));
if (_audioTrack) {
//生成rtmpCodec对象以便解码rtmp
_audioRtmpDecoder = Factory::getRtmpCodecByTrack(_audioTrack);
_audioRtmpDecoder = Factory::getRtmpCodecByTrack(_audioTrack, false);
if (_audioRtmpDecoder) {
//设置rtmp解码器代理生成的frame写入该Track
_audioRtmpDecoder->addDelegate(_audioTrack);
onAddTrack(_audioTrack);
_tryedGetAudioTrack = true;
} else {
//找不到相应的rtmp解码器该track无效
_audioTrack.reset();

View File

@ -30,7 +30,7 @@ public:
RtmpDemuxer() = default;
virtual ~RtmpDemuxer() = default;
void loadMetaData(const AMFValue &metadata);
bool loadMetaData(const AMFValue &metadata);
/**
*

View File

@ -33,9 +33,6 @@ using namespace toolkit;
#define RTMP_GOP_SIZE 512
namespace mediakit {
typedef VideoPacketCache<RtmpPacket> RtmpVideoCache;
typedef AudioPacketCache<RtmpPacket> RtmpAudioCache;
/**
* rtmp媒体源的数据抽象
* rtmp有关键的三要素metadataconfig帧
@ -43,7 +40,7 @@ typedef AudioPacketCache<RtmpPacket> RtmpAudioCache;
* rtmp推流rtmp服务器就很简单了
* rtmp推拉流协议中metadataconfig帧
*/
class RtmpMediaSource : public MediaSource, public RingDelegate<RtmpPacket::Ptr>, public RtmpVideoCache, public RtmpAudioCache{
class RtmpMediaSource : public MediaSource, public RingDelegate<RtmpPacket::Ptr>, public PacketCache<RtmpPacket>{
public:
typedef std::shared_ptr<RtmpMediaSource> Ptr;
typedef std::shared_ptr<List<RtmpPacket::Ptr> > RingDataType;
@ -110,6 +107,14 @@ public:
}
}
/**
* metadata
*/
void updateMetaData(const AMFValue &metadata) {
lock_guard<recursive_mutex> lock(_mtx);
_metadata = metadata;
}
/**
* rtmp包
* @param pkt rtmp包
@ -149,12 +154,7 @@ public:
regist();
}
}
if(pkt->typeId == MSG_VIDEO){
RtmpVideoCache::inputVideo(pkt, key);
}else{
RtmpAudioCache::inputAudio(pkt);
}
PacketCache<RtmpPacket>::inputPacket(pkt->typeId == MSG_VIDEO, pkt, key);
}
/**
@ -175,21 +175,13 @@ public:
private:
/**
* flush时间戳相同的视频rtmp包时触发该函数
* @param rtmp_list rtmp包列表
* flush rtmp包时触发该函数
* @param rtmp_list rtmp包列表
* @param key_pos
*/
void onFlushVideo(std::shared_ptr<List<RtmpPacket::Ptr> > &rtmp_list, bool key_pos) override {
_ring->write(rtmp_list, key_pos);
}
/**
* flush一定数量的音频rtmp包时触发该函数
* @param rtmp_list rtmp包列表
*/
void onFlushAudio(std::shared_ptr<List<RtmpPacket::Ptr> > &rtmp_list) override{
//只有音频的话就不存在gop缓存的意义
_ring->write(rtmp_list, !_have_video);
void onFlush(std::shared_ptr<List<RtmpPacket::Ptr> > &rtmp_list, bool key_pos) override {
//如果不存在视频那么就没有存在GOP缓存的意义所以is_key一直为true确保一直清空GOP缓存
_ring->write(rtmp_list, _have_video ? key_pos : true);
}
/**

View File

@ -49,7 +49,11 @@ public:
* metadata
*/
void setMetaData(const AMFValue &metadata) override{
_demuxer->loadMetaData(metadata);
if(!_demuxer->loadMetaData(metadata)){
//该metadata无效需要重新生成
_metadata = metadata;
_recreate_metadata = true;
}
RtmpMediaSource::setMetaData(metadata);
}
@ -146,11 +150,22 @@ public:
void onAllTrackReady() override{
setTrackSource(_muxer);
_all_track_ready = true;
if (_recreate_metadata) {
//更新metadata
for (auto &track : _muxer->getTracks()) {
Metadata::addTrack(_metadata, track);
}
RtmpMediaSource::updateMetaData(_metadata);
}
}
private:
RtmpDemuxer::Ptr _demuxer;
MultiMediaSourceMuxer::Ptr _muxer;
AMFValue _metadata;
bool _all_track_ready = false;
bool _recreate_metadata = false;
};
} /* namespace mediakit */

View File

@ -23,47 +23,9 @@ RtmpMuxer::RtmpMuxer(const TitleMeta::Ptr &title) {
}
void RtmpMuxer::addTrack(const Track::Ptr &track) {
//根据track生产metadata
Metadata::Ptr metadata;
switch (track->getTrackType()){
case TrackVideo:{
metadata = std::make_shared<VideoMeta>(dynamic_pointer_cast<VideoTrack>(track));
}
break;
case TrackAudio:{
metadata = std::make_shared<AudioMeta>(dynamic_pointer_cast<AudioTrack>(track));
}
break;
default:
return;
}
switch (track->getCodecId()){
case CodecG711A:
case CodecG711U:{
auto audio_track = dynamic_pointer_cast<AudioTrack>(track);
if(!audio_track){
return;
}
if (audio_track->getAudioSampleRate() != 8000 ||
audio_track->getAudioChannel() != 1 ||
audio_track->getAudioSampleBit() != 16) {
WarnL << "RTMP只支持8000/1/16规格的G711,目前规格是:"
<< audio_track->getAudioSampleRate() << "/"
<< audio_track->getAudioChannel() << "/"
<< audio_track->getAudioSampleBit()
<< ",该音频已被忽略";
return;
}
break;
}
default : break;
}
auto &encoder = _encoder[track->getTrackType()];
//生成rtmp编码器,克隆该Track防止循环引用
encoder = Factory::getRtmpCodecByTrack(track->clone());
encoder = Factory::getRtmpCodecByTrack(track->clone(), true);
if (!encoder) {
return;
}
@ -71,10 +33,8 @@ void RtmpMuxer::addTrack(const Track::Ptr &track) {
//设置rtmp输出环形缓存
encoder->setRtmpRing(_rtmpRing);
//添加其metadata
metadata->getMetadata().object_for_each([&](const std::string &key, const AMFValue &value){
_metadata.set(key,value);
});
//添加metadata
Metadata::addTrack(_metadata,track);
}
void RtmpMuxer::inputFrame(const Frame::Ptr &frame) {

View File

@ -130,8 +130,7 @@ void RtmpSession::onCmd_publish(AMFDecoder &dec) {
auto src = dynamic_pointer_cast<RtmpMediaSource>(MediaSource::find(RTMP_SCHEMA,
_mediaInfo._vhost,
_mediaInfo._app,
_mediaInfo._streamid,
false));
_mediaInfo._streamid));
bool authSuccess = err.empty();
bool ok = (!src && !_pPublisherSrc && authSuccess);
AMFValue status(AMF_OBJECT);
@ -158,6 +157,12 @@ void RtmpSession::onCmd_publish(AMFDecoder &dec) {
setSocketFlags();
};
if(_mediaInfo._app.empty() || _mediaInfo._streamid.empty()){
//不允许莫名其妙的推流url
onRes("rtmp推流url非法", false, false, false);
return;
}
Broadcast::PublishAuthInvoker invoker = [weakSelf,onRes,pToken](const string &err,bool enableRtxp,bool enableHls,bool enableMP4){
auto strongSelf = weakSelf.lock();
if(!strongSelf){
@ -266,6 +271,8 @@ void RtmpSession::sendPlayResponse(const string &err,const RtmpMediaSource::Ptr
onSendMedia(pkt);
});
//音频同步于视频
_stamp[0].syncTo(_stamp[1]);
_pRingReader = src->getRing()->attach(getPoller());
weak_ptr<RtmpSession> weakSelf = dynamic_pointer_cast<RtmpSession>(shared_from_this());
_pRingReader->setReadCB([weakSelf](const RtmpMediaSource::RingDataType &pkt) {

View File

@ -44,6 +44,7 @@ inline void AMFValue::destroy() {
break;
}
}
inline void AMFValue::init() {
switch (_type) {
case AMF_OBJECT:
@ -60,14 +61,13 @@ inline void AMFValue::init() {
default:
break;
}
}
AMFValue::AMFValue(AMFType type) :
_type(type) {
init();
}
AMFValue::~AMFValue() {
destroy();
}
@ -78,7 +78,6 @@ AMFValue::AMFValue(const char *s) :
*_value.string = s;
}
AMFValue::AMFValue(const std::string &s) :
_type(AMF_STRING) {
init();
@ -108,15 +107,7 @@ AMFValue::AMFValue(const AMFValue &from) :
*this = from;
}
AMFValue::AMFValue(AMFValue &&from) {
*this = std::forward<AMFValue>(from);
}
AMFValue& AMFValue::operator =(const AMFValue &from) {
return *this = const_cast<AMFValue &&>(from);
}
AMFValue& AMFValue::operator =(AMFValue &&from) {
AMFValue& AMFValue::operator = (const AMFValue &from) {
destroy();
_type = from._type;
init();
@ -144,7 +135,6 @@ AMFValue& AMFValue::operator =(AMFValue &&from) {
break;
}
return *this;
}
void AMFValue::clear() {
@ -236,7 +226,6 @@ string AMFValue::to_string() const{
}
}
const AMFValue& AMFValue::operator[](const char *str) const {
if (_type != AMF_OBJECT && _type != AMF_ECMA_ARRAY) {
throw std::runtime_error("AMF not a object");
@ -338,6 +327,7 @@ AMFEncoder & AMFEncoder::operator <<(const char *s) {
}
return *this;
}
AMFEncoder & AMFEncoder::operator <<(const std::string &s) {
if (!s.empty()) {
buf += char(AMF0_STRING);
@ -349,18 +339,22 @@ AMFEncoder & AMFEncoder::operator <<(const std::string &s) {
}
return *this;
}
AMFEncoder & AMFEncoder::operator <<(std::nullptr_t) {
buf += char(AMF0_NULL);
return *this;
}
AMFEncoder & AMFEncoder::write_undefined() {
buf += char(AMF0_UNDEFINED);
return *this;
}
AMFEncoder & AMFEncoder::operator <<(const int n){
return (*this) << (double)n;
}
AMFEncoder & AMFEncoder::operator <<(const double n) {
buf += char(AMF0_NUMBER);
uint64_t encoded = 0;

View File

@ -40,6 +40,7 @@ public:
typedef std::map<std::string, AMFValue> mapType;
typedef std::vector<AMFValue> arrayType;
~AMFValue();
AMFValue(AMFType type = AMF_NULL);
AMFValue(const char *s);
AMFValue(const std::string &s);
@ -47,10 +48,7 @@ public:
AMFValue(int i);
AMFValue(bool b);
AMFValue(const AMFValue &from);
AMFValue(AMFValue &&from);
AMFValue &operator =(const AMFValue &from);
AMFValue &operator =(AMFValue &&from);
~AMFValue();
AMFValue &operator = (const AMFValue &from);
void clear();
AMFType type() const ;

View File

@ -8,18 +8,211 @@
* may be found in the AUTHORS file in the root of the source tree.
*/
#if defined(ENABLE_RTPPROXY)
#include "Decoder.h"
#include "PSDecoder.h"
#include "TSDecoder.h"
#include "Extension/H264.h"
#include "Extension/H265.h"
#include "Extension/AAC.h"
#include "Extension/G711.h"
#if defined(ENABLE_RTPPROXY) || defined(ENABLE_HLS)
#include "mpeg-ts-proto.h"
#endif
namespace mediakit {
Decoder::Ptr Decoder::createDecoder(Decoder::Type type) {
// Instantiate the raw PS/TS demuxer backing a DecoderImp.
// Returns nullptr when the requested demuxer was compiled out
// (PS requires ENABLE_RTPPROXY, TS requires ENABLE_HLS) or the type is unknown.
static Decoder::Ptr createDecoder_l(DecoderImp::Type type) {
    switch (type){
        case DecoderImp::decoder_ps:
#ifdef ENABLE_RTPPROXY
            return std::make_shared<PSDecoder>();
#else
            WarnL << "创建ps解复用器失败请打开ENABLE_RTPPROXY然后重新编译";
            return nullptr;
#endif//ENABLE_RTPPROXY

        case DecoderImp::decoder_ts:
#ifdef ENABLE_HLS
            return std::make_shared<TSDecoder>();
#else
            WarnL << "创建mpegts解复用器失败请打开ENABLE_HLS然后重新编译";
            return nullptr;
#endif//ENABLE_HLS

        default: return nullptr;
    }
}
/////////////////////////////////////////////////////////////
DecoderImp::Ptr DecoderImp::createDecoder(Type type, MediaSinkInterface *sink){
    // Build the underlying PS/TS demuxer first; give up if that type is unavailable.
    if (auto decoder = createDecoder_l(type)) {
        return DecoderImp::Ptr(new DecoderImp(decoder, sink));
    }
    return nullptr;
}
int DecoderImp::input(const uint8_t *data, int bytes){
return _decoder->input(data, bytes);
}
DecoderImp::DecoderImp(const Decoder::Ptr &decoder, MediaSinkInterface *sink){
_decoder = decoder;
_sink = sink;
_decoder->setOnDecode([this](int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes){
onDecode(stream,codecid,flags,pts,dts,data,bytes);
});
}
#if defined(ENABLE_RTPPROXY) || defined(ENABLE_HLS)
#define SWITCH_CASE(codec_id) case codec_id : return #codec_id
static const char *getCodecName(int codec_id) {
switch (codec_id) {
SWITCH_CASE(PSI_STREAM_MPEG1);
SWITCH_CASE(PSI_STREAM_MPEG2);
SWITCH_CASE(PSI_STREAM_AUDIO_MPEG1);
SWITCH_CASE(PSI_STREAM_MP3);
SWITCH_CASE(PSI_STREAM_AAC);
SWITCH_CASE(PSI_STREAM_MPEG4);
SWITCH_CASE(PSI_STREAM_MPEG4_AAC_LATM);
SWITCH_CASE(PSI_STREAM_H264);
SWITCH_CASE(PSI_STREAM_MPEG4_AAC);
SWITCH_CASE(PSI_STREAM_H265);
SWITCH_CASE(PSI_STREAM_AUDIO_AC3);
SWITCH_CASE(PSI_STREAM_AUDIO_EAC3);
SWITCH_CASE(PSI_STREAM_AUDIO_DTS);
SWITCH_CASE(PSI_STREAM_VIDEO_DIRAC);
SWITCH_CASE(PSI_STREAM_VIDEO_VC1);
SWITCH_CASE(PSI_STREAM_VIDEO_SVAC);
SWITCH_CASE(PSI_STREAM_AUDIO_SVAC);
SWITCH_CASE(PSI_STREAM_AUDIO_G711A);
SWITCH_CASE(PSI_STREAM_AUDIO_G711U);
SWITCH_CASE(PSI_STREAM_AUDIO_G722);
SWITCH_CASE(PSI_STREAM_AUDIO_G723);
SWITCH_CASE(PSI_STREAM_AUDIO_G729);
default : return "unknown codec";
}
}
void FrameMerger::inputFrame(const Frame::Ptr &frame,const function<void(uint32_t dts,uint32_t pts,const Buffer::Ptr &buffer)> &cb){
if (!_frameCached.empty() && _frameCached.back()->dts() != frame->dts()) {
Frame::Ptr back = _frameCached.back();
Buffer::Ptr merged_frame = back;
if(_frameCached.size() != 1){
string merged;
_frameCached.for_each([&](const Frame::Ptr &frame){
merged.append(frame->data(),frame->size());
});
merged_frame = std::make_shared<BufferString>(std::move(merged));
}
cb(back->dts(),back->pts(),merged_frame);
_frameCached.clear();
}
_frameCached.emplace_back(Frame::getCacheAbleFrame(frame));
}
void DecoderImp::onDecode(int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes) {
pts /= 90;
dts /= 90;
switch (codecid) {
case PSI_STREAM_H264: {
if (!_codecid_video) {
//获取到视频
_codecid_video = codecid;
InfoL<< "got video track: H264";
auto track = std::make_shared<H264Track>();
onTrack(track);
}
if (codecid != _codecid_video) {
WarnL<< "video track change to H264 from codecid:" << getCodecName(_codecid_video);
return;
}
auto frame = std::make_shared<H264FrameNoCacheAble>((char *) data, bytes, dts, pts,0);
_merger.inputFrame(frame,[this](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer) {
onFrame(std::make_shared<H264FrameNoCacheAble>(buffer->data(), buffer->size(), dts, pts, prefixSize(buffer->data(), buffer->size())));
});
break;
}
case PSI_STREAM_H265: {
if (!_codecid_video) {
//获取到视频
_codecid_video = codecid;
InfoL<< "got video track: H265";
auto track = std::make_shared<H265Track>();
onTrack(track);
}
if (codecid != _codecid_video) {
WarnL<< "video track change to H265 from codecid:" << getCodecName(_codecid_video);
return;
}
auto frame = std::make_shared<H265FrameNoCacheAble>((char *) data, bytes, dts, pts, 0);
_merger.inputFrame(frame,[this](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer) {
onFrame(std::make_shared<H265FrameNoCacheAble>(buffer->data(), buffer->size(), dts, pts, prefixSize(buffer->data(), buffer->size())));
});
break;
}
case PSI_STREAM_AAC: {
if (!_codecid_audio) {
//获取到音频
_codecid_audio = codecid;
InfoL<< "got audio track: AAC";
auto track = std::make_shared<AACTrack>();
onTrack(track);
}
if (codecid != _codecid_audio) {
WarnL<< "audio track change to AAC from codecid:" << getCodecName(_codecid_audio);
return;
}
onFrame(std::make_shared<AACFrameNoCacheAble>((char *) data, bytes, dts, 0, 7));
break;
}
case PSI_STREAM_AUDIO_G711A:
case PSI_STREAM_AUDIO_G711U: {
auto codec = codecid == PSI_STREAM_AUDIO_G711A ? CodecG711A : CodecG711U;
if (!_codecid_audio) {
//获取到音频
_codecid_audio = codecid;
InfoL<< "got audio track: G711";
//G711传统只支持 8000/1/16的规格FFmpeg貌似做了扩展但是这里不管它了
auto track = std::make_shared<G711Track>(codec, 8000, 1, 16);
onTrack(track);
}
if (codecid != _codecid_audio) {
WarnL<< "audio track change to G711 from codecid:" << getCodecName(_codecid_audio);
return;
}
auto frame = std::make_shared<G711FrameNoCacheAble>((char *) data, bytes, dts);
frame->setCodec(codec);
onFrame(frame);
break;
}
default:
if(codecid != 0){
WarnL<< "unsupported codec type:" << getCodecName(codecid) << " " << (int)codecid;
}
break;
}
}
#else
void DecoderImp::onDecode(int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes) {}
#endif
void DecoderImp::onTrack(const Track::Ptr &track) {
_sink->addTrack(track);
}
void DecoderImp::onFrame(const Frame::Ptr &frame) {
_sink->inputFrame(frame);
}
}//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)

View File

@ -11,31 +11,66 @@
#ifndef ZLMEDIAKIT_DECODER_H
#define ZLMEDIAKIT_DECODER_H
#if defined(ENABLE_RTPPROXY)
#include <stdint.h>
#include <memory>
#include <functional>
#include "Decoder.h"
#include "Common/MediaSink.h"
using namespace std;
namespace mediakit {
class Decoder {
public:
typedef std::shared_ptr<Decoder> Ptr;
typedef enum {
decoder_ts = 0,
decoder_ps
}Type;
typedef std::function<void(int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes)> onDecode;
virtual int input(const uint8_t *data, int bytes) = 0;
virtual void setOnDecode(const onDecode &decode) = 0;
static Ptr createDecoder(Type type);
protected:
Decoder() = default;
virtual ~Decoder() = default;
};
/**
* frame
*/
class FrameMerger {
public:
FrameMerger() = default;
~FrameMerger() = default;
void inputFrame(const Frame::Ptr &frame,const function<void(uint32_t dts,uint32_t pts,const Buffer::Ptr &buffer)> &cb);
private:
List<Frame::Ptr> _frameCached;
};
class DecoderImp{
public:
typedef enum {
decoder_ts = 0,
decoder_ps
}Type;
typedef std::shared_ptr<DecoderImp> Ptr;
~DecoderImp() = default;
static Ptr createDecoder(Type type, MediaSinkInterface *sink);
int input(const uint8_t *data, int bytes);
protected:
void onTrack(const Track::Ptr &track);
void onFrame(const Frame::Ptr &frame);
private:
DecoderImp(const Decoder::Ptr &decoder, MediaSinkInterface *sink);
void onDecode(int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes);
private:
Decoder::Ptr _decoder;
MediaSinkInterface *_sink;
FrameMerger _merger;
int _codecid_video = 0;
int _codecid_audio = 0;
};
}//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)
#endif //ZLMEDIAKIT_DECODER_H

View File

@ -16,8 +16,9 @@ using namespace toolkit;
namespace mediakit{
RtpDecoder::RtpDecoder() {
RtpDecoder::RtpDecoder(const char *codec) {
_buffer = std::make_shared<BufferRaw>();
_codec = codec;
}
RtpDecoder::~RtpDecoder() {
@ -46,7 +47,7 @@ void RtpDecoder::decodeRtp(const void *data, int bytes) {
uint8_t rtp_type = 0x7F & ((uint8_t *) data)[1];
InfoL << "rtp type:" << (int) rtp_type;
_rtp_decoder = rtp_payload_decode_create(rtp_type, "MP2P", &s_func, this);
_rtp_decoder = rtp_payload_decode_create(rtp_type, _codec.data(), &s_func, this);
if (!_rtp_decoder) {
WarnL << "unsupported rtp type:" << (int) rtp_type << ",size:" << bytes << ",hexdump" << hexdump(data, bytes > 16 ? 16 : bytes);
}

View File

@ -19,14 +19,15 @@ namespace mediakit{
class RtpDecoder {
public:
RtpDecoder();
RtpDecoder(const char *codec = "MP2P");
virtual ~RtpDecoder();
protected:
void decodeRtp(const void *data, int bytes);
protected:
virtual void onRtpDecode(const uint8_t *packet, int bytes, uint32_t timestamp, int flags) = 0;
private:
void *_rtp_decoder = nullptr;
BufferRaw::Ptr _buffer;
string _codec;
};
}//namespace mediakit

View File

@ -9,44 +9,13 @@
*/
#if defined(ENABLE_RTPPROXY)
#include "mpeg-ts-proto.h"
#include "RtpProcess.h"
#include "Util/File.h"
#include "Extension/H265.h"
#include "Extension/AAC.h"
#include "Extension/G711.h"
#include "Http/HttpTSPlayer.h"
#define RTP_APP_NAME "rtp"
namespace mediakit{
/**
* frame
*/
class FrameMerger {
public:
FrameMerger() = default;
virtual ~FrameMerger() = default;
void inputFrame(const Frame::Ptr &frame,const function<void(uint32_t dts,uint32_t pts,const Buffer::Ptr &buffer)> &cb){
if (!_frameCached.empty() && _frameCached.back()->dts() != frame->dts()) {
Frame::Ptr back = _frameCached.back();
Buffer::Ptr merged_frame = back;
if(_frameCached.size() != 1){
string merged;
_frameCached.for_each([&](const Frame::Ptr &frame){
merged.append(frame->data(),frame->size());
});
merged_frame = std::make_shared<BufferString>(std::move(merged));
}
cb(back->dts(),back->pts(),merged_frame);
_frameCached.clear();
}
_frameCached.emplace_back(Frame::getCacheAbleFrame(frame));
}
private:
List<Frame::Ptr> _frameCached;
};
string printSSRC(uint32_t ui32Ssrc) {
char tmp[9] = { 0 };
ui32Ssrc = htonl(ui32Ssrc);
@ -101,7 +70,6 @@ RtpProcess::RtpProcess(uint32_t ssrc) {
});
}
}
_merger = std::make_shared<FrameMerger>();
}
RtpProcess::~RtpProcess() {
@ -147,7 +115,6 @@ bool RtpProcess::inputRtp(const Socket::Ptr &sock, const char *data, int data_le
}
_total_bytes += data_len;
_last_rtp_time.resetTime();
bool ret = handleOneRtp(0,_track,(unsigned char *)data,data_len);
if(dts_out){
*dts_out = _dts;
@ -157,12 +124,12 @@ bool RtpProcess::inputRtp(const Socket::Ptr &sock, const char *data, int data_le
//判断是否为ts负载
static inline bool checkTS(const uint8_t *packet, int bytes){
return bytes % 188 == 0 && packet[0] == 0x47;
return bytes % TS_PACKET_SIZE == 0 && packet[0] == TS_SYNC_BYTE;
}
void RtpProcess::onRtpSorted(const RtpPacket::Ptr &rtp, int) {
if(rtp->sequence != _sequence + 1 && rtp->sequence != 0){
WarnP(this) << rtp->sequence << " != " << _sequence << "+1";
if(rtp->sequence != _sequence + 1 && _sequence != 0){
WarnP(this) << "rtp丢包:" << rtp->sequence << " != " << _sequence << "+1" << ",公网环境下请使用tcp方式推流";
}
_sequence = rtp->sequence;
if(_save_file_rtp){
@ -179,155 +146,38 @@ void RtpProcess::onRtpDecode(const uint8_t *packet, int bytes, uint32_t timestam
fwrite((uint8_t *)packet,bytes, 1, _save_file_ps.get());
}
if(!_decoder){
if (!_decoder) {
//创建解码器
if(checkTS(packet, bytes)){
if (checkTS(packet, bytes)) {
//猜测是ts负载
InfoP(this) << "judged to be TS";
_decoder = Decoder::createDecoder(Decoder::decoder_ts);
}else{
_decoder = DecoderImp::createDecoder(DecoderImp::decoder_ts, this);
} else {
//猜测是ps负载
InfoP(this) << "judged to be PS";
_decoder = Decoder::createDecoder(Decoder::decoder_ps);
_decoder = DecoderImp::createDecoder(DecoderImp::decoder_ps, this);
}
_decoder->setOnDecode([this](int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes){
onDecode(stream,codecid,flags,pts,dts,data,bytes);
});
}
auto ret = _decoder->input((uint8_t *)packet,bytes);
if(ret != bytes){
WarnP(this) << ret << " != " << bytes << " " << flags;
if (_decoder) {
auto ret = _decoder->input((uint8_t *) packet, bytes);
if (ret != bytes) {
WarnP(this) << ret << " != " << bytes << " " << flags;
}
}
}
#define SWITCH_CASE(codec_id) case codec_id : return #codec_id
static const char *getCodecName(int codec_id) {
switch (codec_id) {
SWITCH_CASE(PSI_STREAM_MPEG1);
SWITCH_CASE(PSI_STREAM_MPEG2);
SWITCH_CASE(PSI_STREAM_AUDIO_MPEG1);
SWITCH_CASE(PSI_STREAM_MP3);
SWITCH_CASE(PSI_STREAM_AAC);
SWITCH_CASE(PSI_STREAM_MPEG4);
SWITCH_CASE(PSI_STREAM_MPEG4_AAC_LATM);
SWITCH_CASE(PSI_STREAM_H264);
SWITCH_CASE(PSI_STREAM_MPEG4_AAC);
SWITCH_CASE(PSI_STREAM_H265);
SWITCH_CASE(PSI_STREAM_AUDIO_AC3);
SWITCH_CASE(PSI_STREAM_AUDIO_EAC3);
SWITCH_CASE(PSI_STREAM_AUDIO_DTS);
SWITCH_CASE(PSI_STREAM_VIDEO_DIRAC);
SWITCH_CASE(PSI_STREAM_VIDEO_VC1);
SWITCH_CASE(PSI_STREAM_VIDEO_SVAC);
SWITCH_CASE(PSI_STREAM_AUDIO_SVAC);
SWITCH_CASE(PSI_STREAM_AUDIO_G711A);
SWITCH_CASE(PSI_STREAM_AUDIO_G711U);
SWITCH_CASE(PSI_STREAM_AUDIO_G722);
SWITCH_CASE(PSI_STREAM_AUDIO_G723);
SWITCH_CASE(PSI_STREAM_AUDIO_G729);
default : return "unknown codec";
void RtpProcess::inputFrame(const Frame::Ptr &frame){
_last_rtp_time.resetTime();
_dts = frame->dts();
if (_save_file_video && frame->getTrackType() == TrackVideo) {
fwrite((uint8_t *) frame->data(), frame->size(), 1, _save_file_video.get());
}
_muxer->inputFrame(frame);
}
void RtpProcess::onDecode(int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes) {
pts /= 90;
dts /= 90;
_stamps[codecid].revise(dts,pts,dts,pts,false);
switch (codecid) {
case PSI_STREAM_H264: {
_dts = dts;
if (!_codecid_video) {
//获取到视频
_codecid_video = codecid;
InfoP(this) << "got video track: H264";
auto track = std::make_shared<H264Track>();
_muxer->addTrack(track);
}
if (codecid != _codecid_video) {
WarnP(this) << "video track change to H264 from codecid:" << getCodecName(_codecid_video);
return;
}
if(_save_file_video){
fwrite((uint8_t *)data,bytes, 1, _save_file_video.get());
}
auto frame = std::make_shared<H264FrameNoCacheAble>((char *) data, bytes, dts, pts,0);
_merger->inputFrame(frame,[this](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer) {
_muxer->inputFrame(std::make_shared<H264FrameNoCacheAble>(buffer->data(), buffer->size(), dts, pts,4));
});
break;
}
case PSI_STREAM_H265: {
_dts = dts;
if (!_codecid_video) {
//获取到视频
_codecid_video = codecid;
InfoP(this) << "got video track: H265";
auto track = std::make_shared<H265Track>();
_muxer->addTrack(track);
}
if (codecid != _codecid_video) {
WarnP(this) << "video track change to H265 from codecid:" << getCodecName(_codecid_video);
return;
}
if(_save_file_video){
fwrite((uint8_t *)data,bytes, 1, _save_file_video.get());
}
auto frame = std::make_shared<H265FrameNoCacheAble>((char *) data, bytes, dts, pts, 0);
_merger->inputFrame(frame,[this](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer) {
_muxer->inputFrame(std::make_shared<H265FrameNoCacheAble>(buffer->data(), buffer->size(), dts, pts, 4));
});
break;
}
case PSI_STREAM_AAC: {
_dts = dts;
if (!_codecid_audio) {
//获取到音频
_codecid_audio = codecid;
InfoP(this) << "got audio track: AAC";
auto track = std::make_shared<AACTrack>();
_muxer->addTrack(track);
}
if (codecid != _codecid_audio) {
WarnP(this) << "audio track change to AAC from codecid:" << getCodecName(_codecid_audio);
return;
}
_muxer->inputFrame(std::make_shared<AACFrameNoCacheAble>((char *) data, bytes, dts, 0, 7));
break;
}
case PSI_STREAM_AUDIO_G711A:
case PSI_STREAM_AUDIO_G711U: {
_dts = dts;
auto codec = codecid == PSI_STREAM_AUDIO_G711A ? CodecG711A : CodecG711U;
if (!_codecid_audio) {
//获取到音频
_codecid_audio = codecid;
InfoP(this) << "got audio track: G711";
//G711传统只支持 8000/1/16的规格FFmpeg貌似做了扩展但是这里不管它了
auto track = std::make_shared<G711Track>(codec, 8000, 1, 16);
_muxer->addTrack(track);
}
if (codecid != _codecid_audio) {
WarnP(this) << "audio track change to G711 from codecid:" << getCodecName(_codecid_audio);
return;
}
_muxer->inputFrame(std::make_shared<G711FrameNoCacheAble>(codec, (char *) data, bytes, dts));
break;
}
default:
if(codecid != 0){
WarnP(this) << "unsupported codec type:" << getCodecName(codecid) << " " << (int)codecid;
}
return;
}
void RtpProcess::addTrack(const Track::Ptr & track){
_muxer->addTrack(track);
}
bool RtpProcess::alive() {
@ -412,6 +262,5 @@ void RtpProcess::emitOnPublish() {
}
}
}//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)

View File

@ -23,8 +23,7 @@ using namespace mediakit;
namespace mediakit{
string printSSRC(uint32_t ui32Ssrc);
class FrameMerger;
class RtpProcess : public RtpReceiver , public RtpDecoder, public SockInfo, public std::enable_shared_from_this<RtpProcess>{
class RtpProcess : public RtpReceiver , public RtpDecoder, public SockInfo, public MediaSinkInterface, public std::enable_shared_from_this<RtpProcess>{
public:
typedef std::shared_ptr<RtpProcess> Ptr;
RtpProcess(uint32_t ssrc);
@ -44,7 +43,9 @@ public:
protected:
void onRtpSorted(const RtpPacket::Ptr &rtp, int track_index) override ;
void onRtpDecode(const uint8_t *packet, int bytes, uint32_t timestamp, int flags) override;
void onDecode(int stream,int codecid,int flags,int64_t pts,int64_t dts, const void *data,int bytes);
void inputFrame(const Frame::Ptr &frame) override;
void addTrack(const Track::Ptr & track) override;
void resetTracks() override {};
private:
void emitOnPublish();
@ -57,14 +58,10 @@ private:
SdpTrack::Ptr _track;
struct sockaddr *_addr = nullptr;
uint16_t _sequence = 0;
int _codecid_video = 0;
int _codecid_audio = 0;
MultiMediaSourceMuxer::Ptr _muxer;
std::shared_ptr<FrameMerger> _merger;
Ticker _last_rtp_time;
unordered_map<int,Stamp> _stamps;
uint32_t _dts = 0;
Decoder::Ptr _decoder;
DecoderImp::Ptr _decoder;
std::weak_ptr<MediaSourceEvent> _listener;
MediaInfo _media_info;
uint64_t _total_bytes = 0;

View File

@ -8,34 +8,38 @@
* may be found in the AUTHORS file in the root of the source tree.
*/
#if defined(ENABLE_RTPPROXY)
#include "mpeg-ts.h"
#include "TSDecoder.h"
#define TS_PACKET_SIZE 188
namespace mediakit {
bool TSSegment::isTSPacket(const char *data, int len){
return len == TS_PACKET_SIZE && ((uint8_t*)data)[0] == TS_SYNC_BYTE;
}
void TSSegment::setOnSegment(const TSSegment::onSegment &cb) {
_onSegment = cb;
}
int64_t TSSegment::onRecvHeader(const char *data, uint64_t len) {
if (!isTSPacket(data, len)) {
WarnL << "不是ts包:" << (int) (data[0]) << " " << len;
return 0;
}
_onSegment(data, len);
return 0;
}
const char *TSSegment::onSearchPacketTail(const char *data, int len) {
if (len < _size + 1) {
if (len == _size && ((uint8_t *) data)[0] == 0x47) {
if (len == _size && ((uint8_t *) data)[0] == TS_SYNC_BYTE) {
return data + _size;
}
return nullptr;
}
//下一个包头
if (((uint8_t *) data)[_size] == 0x47) {
if (((uint8_t *) data)[_size] == TS_SYNC_BYTE) {
return data + _size;
}
auto pos = memchr(data + _size, 0x47, len - _size);
auto pos = memchr(data + _size, TS_SYNC_BYTE, len - _size);
if (pos) {
return (char *) pos;
}
@ -44,12 +48,10 @@ const char *TSSegment::onSearchPacketTail(const char *data, int len) {
////////////////////////////////////////////////////////////////
TSDecoder::TSDecoder() : _ts_segment(TS_PACKET_SIZE) {
#if defined(ENABLE_HLS)
#include "mpeg-ts.h"
TSDecoder::TSDecoder() : _ts_segment() {
_ts_segment.setOnSegment([this](const char *data,uint64_t len){
if(((uint8_t*)data)[0] != 0x47 || len != TS_PACKET_SIZE ){
WarnL << "不是ts包:" << (int)(data[0]) << " " << len;
return;
}
ts_demuxer_input(_demuxer_ctx,(uint8_t*)data,len);
});
_demuxer_ctx = ts_demuxer_create([](void* param, int program, int stream, int codecid, int flags, int64_t pts, int64_t dts, const void* data, size_t bytes){
@ -66,8 +68,8 @@ TSDecoder::~TSDecoder() {
}
int TSDecoder::input(const uint8_t *data, int bytes) {
if(bytes == TS_PACKET_SIZE && ((uint8_t*)data)[0] == 0x47){
return ts_demuxer_input(_demuxer_ctx,(uint8_t*)data,bytes);
if (TSSegment::isTSPacket((char *)data, bytes)) {
return ts_demuxer_input(_demuxer_ctx, (uint8_t *) data, bytes);
}
_ts_segment.input((char*)data,bytes);
return bytes;
@ -76,6 +78,6 @@ int TSDecoder::input(const uint8_t *data, int bytes) {
void TSDecoder::setOnDecode(const Decoder::onDecode &decode) {
_on_decode = decode;
}
#endif//defined(ENABLE_HLS)
}//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)

View File

@ -11,7 +11,6 @@
#ifndef ZLMEDIAKIT_TSDECODER_H
#define ZLMEDIAKIT_TSDECODER_H
#if defined(ENABLE_RTPPROXY)
#include "Util/logger.h"
#include "Http/HttpRequestSplitter.h"
#include "Decoder.h"
@ -19,13 +18,17 @@
using namespace toolkit;
namespace mediakit {
//ts包拆分器
#define TS_PACKET_SIZE 188
#define TS_SYNC_BYTE 0x47
//TS包分割器用于split一个一个的ts包
class TSSegment : public HttpRequestSplitter {
public:
typedef std::function<void(const char *data,uint64_t len)> onSegment;
TSSegment(int size = 188) : _size(size){}
TSSegment(int size = TS_PACKET_SIZE) : _size(size){}
~TSSegment(){}
void setOnSegment(const onSegment &cb);
static bool isTSPacket(const char *data, int len);
protected:
int64_t onRecvHeader(const char *data, uint64_t len) override ;
const char *onSearchPacketTail(const char *data, int len) override ;
@ -34,6 +37,7 @@ private:
onSegment _onSegment;
};
#if defined(ENABLE_HLS)
//ts解析器
class TSDecoder : public Decoder {
public:
@ -46,7 +50,7 @@ private:
struct ts_demuxer_t* _demuxer_ctx = nullptr;
onDecode _on_decode;
};
#endif//defined(ENABLE_HLS)
}//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)
#endif //ZLMEDIAKIT_TSDECODER_H

View File

@ -17,7 +17,9 @@ UdpRecver::UdpRecver() {
}
UdpRecver::~UdpRecver() {
_sock->setOnRead(nullptr);
if(_sock){
_sock->setOnRead(nullptr);
}
}
bool UdpRecver::initSock(uint16_t local_port,const char *local_ip) {

View File

@ -28,18 +28,18 @@ RtpPacket::Ptr RtpInfo::makeRtp(TrackType type, const void* data, unsigned int l
pucRtp[2] = ui16RtpLen >> 8;
pucRtp[3] = ui16RtpLen & 0x00FF;
pucRtp[4] = 0x80;
pucRtp[5] = (mark << 7) | _ui8PlayloadType;
pucRtp[5] = (mark << 7) | _ui8PayloadType;
memcpy(&pucRtp[6], &sq, 2);
memcpy(&pucRtp[8], &ts, 4);
//ssrc
memcpy(&pucRtp[12], &sc, 4);
if(data){
//playload
//payload
memcpy(&pucRtp[16], data, len);
}
rtppkt->PT = _ui8PlayloadType;
rtppkt->PT = _ui8PayloadType;
rtppkt->interleaved = _ui8Interleaved;
rtppkt->mark = mark;
rtppkt->sequence = _ui16Sequence;

View File

@ -66,7 +66,7 @@ public:
RtpInfo(uint32_t ui32Ssrc,
uint32_t ui32MtuSize,
uint32_t ui32SampleRate,
uint8_t ui8PlayloadType,
uint8_t ui8PayloadType,
uint8_t ui8Interleaved) {
if(ui32Ssrc == 0){
ui32Ssrc = ((uint64_t)this) & 0xFFFFFFFF;
@ -74,7 +74,7 @@ public:
_ui32Ssrc = ui32Ssrc;
_ui32SampleRate = ui32SampleRate;
_ui32MtuSize = ui32MtuSize;
_ui8PlayloadType = ui8PlayloadType;
_ui8PayloadType = ui8PayloadType;
_ui8Interleaved = ui8Interleaved;
}
@ -84,8 +84,8 @@ public:
return _ui8Interleaved;
}
int getPlayloadType() const {
return _ui8PlayloadType;
int getPayloadType() const {
return _ui8PayloadType;
}
int getSampleRate() const {
@ -110,7 +110,7 @@ protected:
uint32_t _ui32Ssrc;
uint32_t _ui32SampleRate;
uint32_t _ui32MtuSize;
uint8_t _ui8PlayloadType;
uint8_t _ui8PayloadType;
uint8_t _ui8Interleaved;
uint16_t _ui16Sequence = 0;
uint32_t _ui32TimeStamp = 0;

View File

@ -81,6 +81,7 @@ RtpMultiCaster::~RtpMultiCaster() {
_pReader->setDetachCB(nullptr);
DebugL;
}
RtpMultiCaster::RtpMultiCaster(const EventPoller::Ptr &poller,const string &strLocalIp,const string &strVhost,const string &strApp,const string &strStream) {
auto src = dynamic_pointer_cast<RtspMediaSource>(MediaSource::find(RTSP_SCHEMA,strVhost,strApp, strStream));
if(!src){

View File

@ -34,11 +34,11 @@ bool RtpReceiver::handleOneRtp(int track_index,SdpTrack::Ptr &track, unsigned ch
}
uint8_t padding = 0;
if (rtp_raw_ptr[0] & 0x40) {
if (rtp_raw_ptr[0] & 0x20) {
//获取padding大小
padding = rtp_raw_ptr[rtp_raw_len - 1];
//移除padding flag
rtp_raw_ptr[0] &= ~0x40;
rtp_raw_ptr[0] &= ~0x20;
//移除padding字节
rtp_raw_len -= padding;
}

Some files were not shown because too many files have changed in this diff Show More