perf: Adjust VideoStack-related code (#3642)

1. Fix the ENABLE_VIDEOSTACK compile-time macro not taking effect
2. Add a reset interface for changing the stacking (composition) parameters without interrupting the stream
KkemChen 2024-06-19 14:06:02 +08:00 committed by GitHub
parent efc683228c
commit 2f6723f602
4 changed files with 267 additions and 321 deletions
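
For context, the new /index/api/stack/reset endpoint registered at the end of this diff forwards to VideoStackManager::resetVideoStack(), which re-parses the layout JSON and rebinds channels while the output stream keeps running. A minimal caller-side sketch, using only the layout fields that parseParams() reads in this diff; the per-cell stream description and the exact semantics of the gap fields are outside this diff, and the values below are hypothetical:

    // Sketch (not part of the commit): change a running stack's layout in place.
    #include "json/json.h"
    #include "VideoStack.h" // assumed include path for VideoStackManager

    static int resetStackLayout() {
        Json::Value layout;
        layout["id"] = "stack_01";  // id of an already started stack (hypothetical)
        layout["width"] = 1920;
        layout["height"] = 1080;
        layout["row"] = 2;          // grid rows
        layout["col"] = 2;          // grid columns
        layout["gapv"] = 0.002;     // vertical gap (semantics defined in parseParams)
        layout["gaph"] = 0.002;     // horizontal gap
        // Returns 0 on success, -1 if params are unusable, -2 if the stack id is unknown;
        // missing required fields now raise Json::LogicError instead.
        return VideoStackManager::Instance().resetVideoStack(layout);
    }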


@@ -452,6 +452,15 @@ elseif(NOT ANDROID OR IOS)
   update_cached_list(MK_LINK_LIBRARIES pthread)
 endif()
 
+if(ENABLE_VIDEOSTACK)
+  if(ENABLE_FFMPEG AND ENABLE_X264)
+    message(STATUS "ENABLE_VIDEOSTACK defined")
+    update_cached_list(MK_COMPILE_DEFINITIONS ENABLE_VIDEOSTACK)
+  else()
+    message(WARNING "ENABLE_VIDEOSTACK requires ENABLE_FFMPEG and ENABLE_X264")
+  endif ()
+endif ()
+
 # ----------------------------------------------------------------------------
 # Solution folders:
 # ----------------------------------------------------------------------------
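
With this fix, ENABLE_VIDEOSTACK only lands in MK_COMPILE_DEFINITIONS when FFmpeg and x264 are also enabled, so the macro can be treated as implying both dependencies. A hedged sketch of how dependent code can guard on it, mirroring the guard that appears in the web API changes at the end of this diff (the helper name is made up):

    // Sketch: compile VideoStack calls only when the full feature set is available.
    #include <string>
    #if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_FFMPEG) && defined(ENABLE_X264)
    #include "VideoStack.h" // assumed include path
    static void stopStackIfEnabled(const std::string& id) {
        VideoStackManager::Instance().stopVideoStack(id);
    }
    #else
    static void stopStackIfEnabled(const std::string&) {
        // VideoStack support was compiled out; nothing to do.
    }
    #endif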


@@ -23,18 +23,10 @@
 INSTANCE_IMP(VideoStackManager)
 
-Param::~Param()
-{
-    VideoStackManager::Instance().unrefChannel(
-        id, width, height, pixfmt);
-}
+Param::~Param() { VideoStackManager::Instance().unrefChannel(id, width, height, pixfmt); }
 
 Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt)
-    : _id(id)
-    , _width(width)
-    , _height(height)
-    , _pixfmt(pixfmt)
-{
+    : _id(id), _width(width), _height(height), _pixfmt(pixfmt) {
     _tmp = std::make_shared<mediakit::FFmpegFrame>();
     _tmp->get()->width = _width;
@@ -53,64 +45,52 @@ Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pix
     _tmp = _sws->inputFrame(frame);
 }
 
-void Channel::addParam(const std::weak_ptr<Param>& p)
-{
+void Channel::addParam(const std::weak_ptr<Param>& p) {
     std::lock_guard<std::recursive_mutex> lock(_mx);
     _params.push_back(p);
 }
 
-void Channel::onFrame(const mediakit::FFmpegFrame::Ptr& frame)
-{
+void Channel::onFrame(const mediakit::FFmpegFrame::Ptr& frame) {
     std::weak_ptr<Channel> weakSelf = shared_from_this();
     _poller = _poller ? _poller : toolkit::WorkThreadPool::Instance().getPoller();
     _poller->async([weakSelf, frame]() {
         auto self = weakSelf.lock();
-        if (!self) {
-            return;
-        }
+        if (!self) { return; }
         self->_tmp = self->_sws->inputFrame(frame);
         self->forEachParam([self](const Param::Ptr& p) { self->fillBuffer(p); });
     });
 }
 
-void Channel::forEachParam(const std::function<void(const Param::Ptr&)>& func)
-{
+void Channel::forEachParam(const std::function<void(const Param::Ptr&)>& func) {
     for (auto& wp : _params) {
-        if (auto sp = wp.lock()) {
-            func(sp);
-        }
+        if (auto sp = wp.lock()) { func(sp); }
     }
 }
 
-void Channel::fillBuffer(const Param::Ptr& p)
-{
-    if (auto buf = p->weak_buf.lock()) {
-        copyData(buf, p);
-    }
+void Channel::fillBuffer(const Param::Ptr& p) {
+    if (auto buf = p->weak_buf.lock()) { copyData(buf, p); }
 }
 
-void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p)
-{
+void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p) {
     switch (p->pixfmt) {
         case AV_PIX_FMT_YUV420P: {
             for (int i = 0; i < p->height; i++) {
                 memcpy(buf->get()->data[0] + buf->get()->linesize[0] * (i + p->posY) + p->posX,
-                       _tmp->get()->data[0] + _tmp->get()->linesize[0] * i,
-                       _tmp->get()->width);
+                       _tmp->get()->data[0] + _tmp->get()->linesize[0] * i, _tmp->get()->width);
             }
             // Make sure the last row of UV data is also copied correctly when height is odd
             for (int i = 0; i < (p->height + 1) / 2; i++) {
                 // U plane
-                memcpy(buf->get()->data[1] + buf->get()->linesize[1] * (i + p->posY / 2) + p->posX / 2,
-                       _tmp->get()->data[1] + _tmp->get()->linesize[1] * i,
-                       _tmp->get()->width / 2);
+                memcpy(buf->get()->data[1] + buf->get()->linesize[1] * (i + p->posY / 2) +
+                           p->posX / 2,
+                       _tmp->get()->data[1] + _tmp->get()->linesize[1] * i, _tmp->get()->width / 2);
                 // V plane
-                memcpy(buf->get()->data[2] + buf->get()->linesize[2] * (i + p->posY / 2) + p->posX / 2,
-                       _tmp->get()->data[2] + _tmp->get()->linesize[2] * i,
-                       _tmp->get()->width / 2);
+                memcpy(buf->get()->data[2] + buf->get()->linesize[2] * (i + p->posY / 2) +
+                           p->posX / 2,
+                       _tmp->get()->data[2] + _tmp->get()->linesize[2] * i, _tmp->get()->width / 2);
             }
             break;
         }
@@ -119,19 +99,15 @@ void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr&
             break;
         }
-        default:
-            WarnL << "No support pixformat: " << av_get_pix_fmt_name(p->pixfmt);
-            break;
+        default: WarnL << "No support pixformat: " << av_get_pix_fmt_name(p->pixfmt); break;
     }
 }
 
-void StackPlayer::addChannel(const std::weak_ptr<Channel>& chn)
-{
+void StackPlayer::addChannel(const std::weak_ptr<Channel>& chn) {
     std::lock_guard<std::recursive_mutex> lock(_mx);
     _channels.push_back(chn);
 }
 
-void StackPlayer::play()
-{
+void StackPlayer::play() {
     auto url = _url;
     // Create the pull-stream and decoder objects
@@ -146,13 +122,9 @@ void StackPlayer::play()
     _player->setOnPlayResult([weakPlayer, weakSelf, url](const toolkit::SockException& ex) mutable {
         TraceL << "StackPlayer: " << url << " OnPlayResult: " << ex.what();
         auto strongPlayer = weakPlayer.lock();
-        if (!strongPlayer) {
-            return;
-        }
+        if (!strongPlayer) { return; }
 
         auto self = weakSelf.lock();
-        if (!self) {
-            return;
-        }
+        if (!self) { return; }
 
         if (!ex) {
             // Cancel the retry timer
@@ -164,19 +136,18 @@ void StackPlayer::play()
             self->rePlay(url);
         }
 
-        auto videoTrack = std::dynamic_pointer_cast<mediakit::VideoTrack>(strongPlayer->getTrack(mediakit::TrackVideo, false));
+        auto videoTrack = std::dynamic_pointer_cast<mediakit::VideoTrack>(
+            strongPlayer->getTrack(mediakit::TrackVideo, false));
         // auto audioTrack = std::dynamic_pointer_cast<mediakit::AudioTrack>(strongPlayer->getTrack(mediakit::TrackAudio, false));
 
         if (videoTrack) {
             // TODO: add logic to decide between GPU and CPU decoding
-            //auto decoder = std::make_shared<FFmpegDecoder>(videoTrack, 1, std::vector<std::string>{ "hevc_cuvid", "h264_cuvid"});
-            auto decoder = std::make_shared<mediakit::FFmpegDecoder>(videoTrack, 0, std::vector<std::string> { "h264", "hevc" });
+            auto decoder = std::make_shared<mediakit::FFmpegDecoder>(
+                videoTrack, 0, std::vector<std::string>{"h264", "hevc"});
 
             decoder->setOnDecode([weakSelf](const mediakit::FFmpegFrame::Ptr& frame) mutable {
                 auto self = weakSelf.lock();
-                if (!self) {
-                    return;
-                }
+                if (!self) { return; }
                 self->onFrame(frame);
             });
@@ -190,14 +161,10 @@ void StackPlayer::play()
     _player->setOnShutdown([weakPlayer, url, weakSelf](const toolkit::SockException& ex) {
         TraceL << "StackPlayer: " << url << " OnShutdown: " << ex.what();
         auto strongPlayer = weakPlayer.lock();
-        if (!strongPlayer) {
-            return;
-        }
+        if (!strongPlayer) { return; }
 
         auto self = weakSelf.lock();
-        if (!self) {
-            return;
-        }
+        if (!self) { return; }
 
         self->onDisconnect();
@@ -207,18 +174,14 @@ void StackPlayer::play()
     _player->play(url);
 }
 
-void StackPlayer::onFrame(const mediakit::FFmpegFrame::Ptr& frame)
-{
+void StackPlayer::onFrame(const mediakit::FFmpegFrame::Ptr& frame) {
    std::lock_guard<std::recursive_mutex> lock(_mx);
     for (auto& weak_chn : _channels) {
-        if (auto chn = weak_chn.lock()) {
-            chn->onFrame(frame);
-        }
+        if (auto chn = weak_chn.lock()) { chn->onFrame(frame); }
     }
 }
 
-void StackPlayer::onDisconnect()
-{
+void StackPlayer::onDisconnect() {
     std::lock_guard<std::recursive_mutex> lock(_mx);
     for (auto& weak_chn : _channels) {
         if (auto chn = weak_chn.lock()) {
@@ -228,31 +191,22 @@ void StackPlayer::onDisconnect()
     }
 }
 
-void StackPlayer::rePlay(const std::string& url)
-{
+void StackPlayer::rePlay(const std::string& url) {
     _failedCount++;
     auto delay = MAX(2 * 1000, MIN(_failedCount * 3 * 1000, 60 * 1000));// stepped retry interval
     std::weak_ptr<StackPlayer> weakSelf = shared_from_this();
 
-    _timer = std::make_shared<toolkit::Timer>(
-        delay / 1000.0f, [weakSelf, url]() {
+    _timer = std::make_shared<toolkit::Timer>(delay / 1000.0f, [weakSelf, url]() {
         auto self = weakSelf.lock();
-        if (!self) {
-        }
+        if (!self) {}
         WarnL << "replay [" << self->_failedCount << "]:" << url;
         self->_player->play(url);
         return false;
-    },
-    nullptr);
+    }, nullptr);
 }
 
-VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelFormat pixfmt, float fps, int bitRate)
-    : _id(id)
-    , _width(width)
-    , _height(height)
-    , _pixfmt(pixfmt)
-    , _fps(fps)
-    , _bitRate(bitRate)
-{
+VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelFormat pixfmt,
+                       float fps, int bitRate)
+    : _id(id), _width(width), _height(height), _pixfmt(pixfmt), _fps(fps), _bitRate(bitRate) {
     _buffer = std::make_shared<mediakit::FFmpegFrame>();
@@ -262,7 +216,8 @@ VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelForm
     av_frame_get_buffer(_buffer->get(), 32);
 
-    _dev = std::make_shared<mediakit::DevChannel>(mediakit::MediaTuple { DEFAULT_VHOST, "live", _id });
+    _dev = std::make_shared<mediakit::DevChannel>(
+        mediakit::MediaTuple{DEFAULT_VHOST, "live", _id, ""});
 
     mediakit::VideoInfo info;
     info.codecId = mediakit::CodecH264;
@@ -278,28 +233,22 @@ VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelForm
     _isExit = false;
 }
 
-VideoStack::~VideoStack()
-{
+VideoStack::~VideoStack() {
     _isExit = true;
-    if (_thread.joinable()) {
-        _thread.join();
-    }
+    if (_thread.joinable()) { _thread.join(); }
 }
 
-void VideoStack::setParam(const Params& params)
-{
+void VideoStack::setParam(const Params& params) {
     if (_params) {
         for (auto& p : (*_params)) {
-            if (!p)
-                continue;
+            if (!p) continue;
             p->weak_buf.reset();
         }
     }
 
     initBgColor();
     for (auto& p : (*params)) {
-        if (!p)
-            continue;
+        if (!p) continue;
         p->weak_buf = _buffer;
         if (auto chn = p->weak_chn.lock()) {
             chn->addParam(p);
@@ -309,14 +258,14 @@ void VideoStack::setParam(const Params& params)
     _params = params;
 }
 
-void VideoStack::start()
-{
+void VideoStack::start() {
     _thread = std::thread([&]() {
         uint64_t pts = 0;
         int frameInterval = 1000 / _fps;
         auto lastEncTP = std::chrono::steady_clock::now();
         while (!_isExit) {
-            if (std::chrono::steady_clock::now() - lastEncTP > std::chrono::milliseconds(frameInterval)) {
+            if (std::chrono::steady_clock::now() - lastEncTP >
+                std::chrono::milliseconds(frameInterval)) {
                 lastEncTP = std::chrono::steady_clock::now();
                 _dev->inputYUV((char**)_buffer->get()->data, _buffer->get()->linesize, pts);
@@ -326,8 +275,7 @@ void VideoStack::start()
     });
 }
 
-void VideoStack::initBgColor()
-{
+void VideoStack::initBgColor() {
     // Fill the background color
     auto R = 20;
     auto G = 20;
@@ -342,27 +290,19 @@ void VideoStack::initBgColor()
     memset(_buffer->get()->data[2], V, _buffer->get()->linesize[2] * _height / 2);
 }
 
-Channel::Ptr VideoStackManager::getChannel(const std::string& id,
-                                           int width,
-                                           int height,
-                                           AVPixelFormat pixfmt)
-{
+Channel::Ptr VideoStackManager::getChannel(const std::string& id, int width, int height,
+                                           AVPixelFormat pixfmt) {
     std::lock_guard<std::recursive_mutex> lock(_mx);
     auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
     auto it = _channelMap.find(key);
-    if (it != _channelMap.end()) {
-        return it->second->acquire();
-    }
+    if (it != _channelMap.end()) { return it->second->acquire(); }
     return createChannel(id, width, height, pixfmt);
 }
 
-void VideoStackManager::unrefChannel(const std::string& id,
-                                     int width,
-                                     int height,
-                                     AVPixelFormat pixfmt)
-{
+void VideoStackManager::unrefChannel(const std::string& id, int width, int height,
+                                     AVPixelFormat pixfmt) {
     std::lock_guard<std::recursive_mutex> lock(_mx);
     auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
@@ -377,8 +317,7 @@ void VideoStackManager::unrefChannel(const std::string& id,
     }
 }
 
-int VideoStackManager::startVideoStack(const Json::Value& json)
-{
+int VideoStackManager::startVideoStack(const Json::Value& json) {
     std::string id;
     int width, height;
@@ -392,8 +331,7 @@ int VideoStackManager::startVideoStack(const Json::Value& json)
     auto stack = std::make_shared<VideoStack>(id, width, height);
 
     for (auto& p : (*params)) {
-        if (!p)
-            continue;
+        if (!p) continue;
         p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
     }
@@ -405,13 +343,13 @@ int VideoStackManager::startVideoStack(const Json::Value& json)
     return 0;
 }
 
-int VideoStackManager::resetVideoStack(const Json::Value& json)
-{
+int VideoStackManager::resetVideoStack(const Json::Value& json) {
     std::string id;
     int width, height;
     auto params = parseParams(json, id, width, height);
 
     if (!params) {
+        ErrorL << "Videostack parse params failed!";
         return -1;
     }
@@ -419,15 +357,12 @@ int VideoStackManager::resetVideoStack(const Json::Value& json)
     {
         std::lock_guard<std::recursive_mutex> lock(_mx);
         auto it = _stackMap.find(id);
-        if (it == _stackMap.end()) {
-            return -2;
-        }
+        if (it == _stackMap.end()) { return -2; }
         stack = it->second;
     }
 
     for (auto& p : (*params)) {
-        if (!p)
-            continue;
+        if (!p) continue;
         p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
     }
@@ -435,8 +370,7 @@ int VideoStackManager::resetVideoStack(const Json::Value& json)
     return 0;
 }
 
-int VideoStackManager::stopVideoStack(const std::string& id)
-{
+int VideoStackManager::stopVideoStack(const std::string& id) {
     std::lock_guard<std::recursive_mutex> lock(_mx);
     auto it = _stackMap.find(id);
     if (it != _stackMap.end()) {
@@ -447,24 +381,24 @@ int VideoStackManager::stopVideoStack(const std::string& id)
     return -1;
 }
 
-mediakit::FFmpegFrame::Ptr VideoStackManager::getBgImg()
-{
-    return _bgImg;
-}
+mediakit::FFmpegFrame::Ptr VideoStackManager::getBgImg() { return _bgImg; }
+
+template<typename T> T getJsonValue(const Json::Value& json, const std::string& key) {
+    if (!json.isMember(key)) {
+        throw Json::LogicError("VideoStack parseParams missing required field: " + key);
+    }
+    return json[key].as<T>();
+}
 
-Params VideoStackManager::parseParams(const Json::Value& json,
-                                      std::string& id,
-                                      int& width,
-                                      int& height)
-{
-    try {
-        id = json["id"].asString();
-        width = json["width"].asInt();
-        height = json["height"].asInt();
-
-        int rows = json["row"].asInt(); // number of stacked rows
-        int cols = json["col"].asInt(); // number of stacked columns
+Params VideoStackManager::parseParams(const Json::Value& json, std::string& id, int& width,
+                                      int& height) {
+
+    id = getJsonValue<std::string>(json, "id");
+    width = getJsonValue<int>(json, "width");
+    height = getJsonValue<int>(json, "height");
+
+    int rows = getJsonValue<int>(json, "row");// number of rows
+    int cols = getJsonValue<int>(json, "col");// number of columns
     float gapv = json["gapv"].asFloat();// vertical gap
     float gaph = json["gaph"].asFloat();// horizontal gap
@@ -494,30 +428,32 @@ Params VideoStackManager::parseParams(const Json::Value& json,
     }
 
     // Check whether cells need to be merged (focus view)
-    if (!json["span"].empty() && json.isMember("span")) {
+    if (json.isMember("span") && json["span"].isArray() && json["span"].size() > 0) {
         for (const auto& subArray : json["span"]) {
             if (!subArray.isArray() || subArray.size() != 2) {
                 throw Json::LogicError("Incorrect 'span' sub-array format in JSON");
             }
             std::array<int, 4> mergePos;
-            int index = 0;
+            unsigned int index = 0;
 
             for (const auto& innerArray : subArray) {
                 if (!innerArray.isArray() || innerArray.size() != 2) {
                     throw Json::LogicError("Incorrect 'span' inner-array format in JSON");
                 }
                 for (const auto& number : innerArray) {
-                    if (index < mergePos.size()) {
-                        mergePos[index++] = number.asInt();
-                    }
+                    if (index < mergePos.size()) { mergePos[index++] = number.asInt(); }
                 }
             }
 
             for (int i = mergePos[0]; i <= mergePos[2]; i++) {
                 for (int j = mergePos[1]; j <= mergePos[3]; j++) {
                     if (i == mergePos[0] && j == mergePos[1]) {
-                        (*params)[i * cols + j]->width = (mergePos[3] - mergePos[1] + 1) * gridWidth + (mergePos[3] - mergePos[1]) * gapvPix;
-                        (*params)[i * cols + j]->height = (mergePos[2] - mergePos[0] + 1) * gridHeight + (mergePos[2] - mergePos[0]) * gaphPix;
+                        (*params)[i * cols + j]->width =
+                            (mergePos[3] - mergePos[1] + 1) * gridWidth +
+                            (mergePos[3] - mergePos[1]) * gapvPix;
+                        (*params)[i * cols + j]->height =
+                            (mergePos[2] - mergePos[0] + 1) * gridHeight +
+                            (mergePos[2] - mergePos[0]) * gaphPix;
                     } else {
                         (*params)[i * cols + j] = nullptr;
                     }
@@ -526,14 +462,9 @@ Params VideoStackManager::parseParams(const Json::Value& json,
             }
         }
         return params;
-    } catch (const std::exception& e) {
-        ErrorL << "Videostack parse params failed! " << e.what();
-        return nullptr;
-    }
 }
 
-bool VideoStackManager::loadBgImg(const std::string& path)
-{
+bool VideoStackManager::loadBgImg(const std::string& path) {
     _bgImg = std::make_shared<mediakit::FFmpegFrame>();
 
     _bgImg->get()->width = 1280;
@@ -543,21 +474,21 @@ bool VideoStackManager::loadBgImg(const std::string& path)
     av_frame_get_buffer(_bgImg->get(), 32);
 
     std::ifstream file(path, std::ios::binary);
-    if (!file.is_open()) {
-        return false;
-    }
+    if (!file.is_open()) { return false; }
 
-    file.read((char*)_bgImg->get()->data[0], _bgImg->get()->linesize[0] * _bgImg->get()->height); // Y
-    file.read((char*)_bgImg->get()->data[1], _bgImg->get()->linesize[1] * _bgImg->get()->height / 2); // U
-    file.read((char*)_bgImg->get()->data[2], _bgImg->get()->linesize[2] * _bgImg->get()->height / 2); // V
+    file.read((char*)_bgImg->get()->data[0],
+              _bgImg->get()->linesize[0] * _bgImg->get()->height);// Y
+    file.read((char*)_bgImg->get()->data[1],
+              _bgImg->get()->linesize[1] * _bgImg->get()->height / 2);// U
+    file.read((char*)_bgImg->get()->data[2],
+              _bgImg->get()->linesize[2] * _bgImg->get()->height / 2);// V
 
     return true;
 }
 
-Channel::Ptr VideoStackManager::createChannel(const std::string& id,
-                                              int width,
-                                              int height,
-                                              AVPixelFormat pixfmt)
-{
+void VideoStackManager::clear() { _stackMap.clear(); }
+
+Channel::Ptr VideoStackManager::createChannel(const std::string& id, int width, int height,
+                                              AVPixelFormat pixfmt) {
     std::lock_guard<std::recursive_mutex> lock(_mx);
 
     StackPlayer::Ptr player;
@@ -568,24 +499,24 @@ Channel::Ptr VideoStackManager::createChannel(const std::string& id,
         player = createPlayer(id);
     }
 
-    auto refChn = std::make_shared<RefWrapper<Channel::Ptr>>(std::make_shared<Channel>(id, width, height, pixfmt));
+    auto refChn = std::make_shared<RefWrapper<Channel::Ptr>>(
+        std::make_shared<Channel>(id, width, height, pixfmt));
     auto chn = refChn->acquire();
     player->addChannel(chn);
 
-    _channelMap[id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt)] = refChn;
+    _channelMap[id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt)] =
+        refChn;
     return chn;
 }
 
-StackPlayer::Ptr VideoStackManager::createPlayer(const std::string& id)
-{
+StackPlayer::Ptr VideoStackManager::createPlayer(const std::string& id) {
     std::lock_guard<std::recursive_mutex> lock(_mx);
-    auto refPlayer = std::make_shared<RefWrapper<StackPlayer::Ptr>>(std::make_shared<StackPlayer>(id));
+    auto refPlayer =
+        std::make_shared<RefWrapper<StackPlayer::Ptr>>(std::make_shared<StackPlayer>(id));
     _playerMap[id] = refPlayer;
 
     auto player = refPlayer->acquire();
-    if (!id.empty()) {
-        player->play();
-    }
+    if (!id.empty()) { player->play(); }
     return player;
 }
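
With the try/catch removed from parseParams(), a missing required field now surfaces as a Json::LogicError thrown by getJsonValue() and bubbles up to the caller, which is why the web API handlers later in this diff wrap the calls in try/catch. A small sketch of that error path (assumed include path, hypothetical helper):

    // Sketch: what a caller observes when a required layout field is absent.
    #include <iostream>
    #include "json/json.h"
    #include "VideoStack.h" // assumed include path

    static void startWithMissingField() {
        Json::Value bad;
        bad["id"] = "stack_01"; // "width", "height", "row", "col" deliberately omitted
        try {
            VideoStackManager::Instance().startVideoStack(bad);
        } catch (const std::exception& e) {
            // e.g. "VideoStack parseParams missing required field: width"
            std::cerr << e.what() << std::endl;
        }
    }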


@@ -5,20 +5,14 @@
 #include "Player/MediaPlayer.h"
 #include "json/json.h"
 #include <mutex>
 
-template <typename T>
-class RefWrapper {
+template<typename T> class RefWrapper {
 public:
     using Ptr = std::shared_ptr<RefWrapper<T>>;
 
     template<typename... Args>
-    explicit RefWrapper(Args&&... args)
-        : _rc(0)
-        , _entity(std::forward<Args>(args)...)
-    {
-    }
+    explicit RefWrapper(Args&&... args) : _rc(0), _entity(std::forward<Args>(args)...) {}
 
-    T acquire()
-    {
+    T acquire() {
         ++_rc;
         return _entity;
     }
@@ -26,8 +20,8 @@ class RefWrapper {
     bool dispose() { return --_rc <= 0; }
 
 private:
-    T _entity;
     std::atomic<int> _rc;
+    T _entity;
 };
 
 class Channel;
@@ -87,10 +81,7 @@ class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
 public:
     using Ptr = std::shared_ptr<StackPlayer>;
 
-    StackPlayer(const std::string& url)
-        : _url(url)
-    {
-    }
+    StackPlayer(const std::string& url) : _url(url) {}
 
     void addChannel(const std::weak_ptr<Channel>& chn);
@@ -119,11 +110,8 @@ class VideoStack {
 public:
     using Ptr = std::shared_ptr<VideoStack>;
 
-    VideoStack(const std::string& url,
-               int width = 1920,
-               int height = 1080,
-               AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P,
-               float fps = 25.0,
+    VideoStack(const std::string& url, int width = 1920, int height = 1080,
+               AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P, float fps = 25.0,
                int bitRate = 2 * 1024 * 1024);
 
     ~VideoStack();
@@ -157,39 +145,33 @@ class VideoStack {
 class VideoStackManager {
 public:
-    static VideoStackManager& Instance();
-
-    Channel::Ptr getChannel(const std::string& id,
-                            int width,
-                            int height,
-                            AVPixelFormat pixfmt);
-
-    void unrefChannel(const std::string& id,
-                      int width,
-                      int height,
-                      AVPixelFormat pixfmt);
-
+    // Create a stacked (composite) stream
     int startVideoStack(const Json::Value& json);
 
-    int resetVideoStack(const Json::Value& json);
-
+    // Stop a stacked stream
     int stopVideoStack(const std::string& id);
 
+    // Change a stacked stream's configuration without interrupting it (switch what the stacked screen shows)
+    int resetVideoStack(const Json::Value& json);
+
+public:
+    static VideoStackManager& Instance();
+
+    Channel::Ptr getChannel(const std::string& id, int width, int height, AVPixelFormat pixfmt);
+
+    void unrefChannel(const std::string& id, int width, int height, AVPixelFormat pixfmt);
+
     bool loadBgImg(const std::string& path);
 
+    void clear();
+
     mediakit::FFmpegFrame::Ptr getBgImg();
 
 protected:
-    Params parseParams(const Json::Value& json,
-                       std::string& id,
-                       int& width,
-                       int& height);
+    Params parseParams(const Json::Value& json, std::string& id, int& width, int& height);
 
 protected:
-    Channel::Ptr createChannel(const std::string& id,
-                               int width,
-                               int height,
-                               AVPixelFormat pixfmt);
+    Channel::Ptr createChannel(const std::string& id, int width, int height, AVPixelFormat pixfmt);
 
     StackPlayer::Ptr createPlayer(const std::string& id);
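
The member reorder above also puts _rc before _entity so the declaration order matches the constructor's initializer list. For orientation, a small usage sketch of the acquire/dispose reference counting that VideoStackManager builds on (illustrative values, assumed include path):

    // Sketch: RefWrapper hands out a shared entity and counts logical references.
    #include <memory>
    #include "VideoStack.h" // assumed include path for RefWrapper / Channel

    static void refWrapperDemo() {
        auto ref = std::make_shared<RefWrapper<Channel::Ptr>>(
            std::make_shared<Channel>("chn_01", 1920, 1080, AV_PIX_FMT_YUV420P));
        auto a = ref->acquire();    // reference count becomes 1
        auto b = ref->acquire();    // reference count becomes 2
        bool last = ref->dispose(); // false: one logical reference still outstanding
        last = ref->dispose();      // true: unrefChannel() may erase the map entry
        (void)a; (void)b; (void)last;
    }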


@@ -8,6 +8,7 @@
  * may be found in the AUTHORS file in the root of the source tree.
  */
 
+#include <exception>
 #include <sys/stat.h>
 #include <math.h>
 #include <signal.h>
@@ -1950,9 +1951,29 @@ void installWebApi() {
     api_regist("/index/api/stack/start", [](API_ARGS_JSON_ASYNC) {
         CHECK_SECRET();
-        auto ret = VideoStackManager::Instance().startVideoStack(allArgs.args);
-        val["code"] = ret;
-        val["msg"] = ret ? "failed" : "success";
+        int ret = 0;
+        try {
+            ret = VideoStackManager::Instance().startVideoStack(allArgs.args);
+            val["code"] = ret;
+            val["msg"] = ret ? "failed" : "success";
+        } catch (const std::exception &e) {
+            val["code"] = -1;
+            val["msg"] = e.what();
+        }
+        invoker(200, headerOut, val.toStyledString());
+    });
+
+    api_regist("/index/api/stack/reset", [](API_ARGS_JSON_ASYNC) {
+        CHECK_SECRET();
+        int ret = 0;
+        try {
+            auto ret = VideoStackManager::Instance().resetVideoStack(allArgs.args);
+            val["code"] = ret;
+            val["msg"] = ret ? "failed" : "success";
+        } catch (const std::exception &e) {
+            val["code"] = -1;
+            val["msg"] = e.what();
+        }
         invoker(200, headerOut, val.toStyledString());
     });
@@ -1974,6 +1995,9 @@ void unInstallWebApi(){
 #if defined(ENABLE_RTPPROXY)
     s_rtp_server.clear();
 #endif
+#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_FFMPEG) && defined(ENABLE_X264)
+    VideoStackManager::Instance().clear();
+#endif
     NoticeCenter::Instance().delListener(&web_api_tag);
 }