Mirror of https://github.com/ZLMediaKit/ZLMediaKit.git (synced 2024-11-22 18:50:20 +08:00)
perf: Adjust VideoStack-related code (#3642)

1. Fix the ENABLE_VIDEOSTACK compile macro not taking effect.
2. Add a reset API so the stitching (composition) parameters can be changed without interrupting the output stream.
This commit is contained in:
Parent: efc683228c
Commit: 2f6723f602
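The reset interface described above is exposed both on VideoStackManager and through the new /index/api/stack/reset HTTP endpoint added in the WebApi.cpp hunk at the end of this diff. As a minimal sketch (not part of the commit), this is how the manager could be driven directly with the JSON layout that parseParams() reads; the field names come from the diff, while the stream URLs and the include path are placeholders:

#include "json/json.h"
#include "VideoStack.h"   // placeholder include path

static void demoVideoStack() {
    Json::Value layout;
    layout["id"] = "stack0";       // output stream id, published under the "live" app
    layout["width"] = 1920;        // resolution of the composite output
    layout["height"] = 1080;
    layout["row"] = 2;             // 2x2 grid
    layout["col"] = 2;
    layout["gapv"] = 0.0;          // gaps, as fractions of height/width
    layout["gaph"] = 0.0;
    // url[row][col]: one source stream per cell (placeholder addresses)
    layout["url"][0][0] = "rtsp://127.0.0.1/live/cam1";
    layout["url"][0][1] = "rtsp://127.0.0.1/live/cam2";
    layout["url"][1][0] = "rtsp://127.0.0.1/live/cam3";
    layout["url"][1][1] = "rtsp://127.0.0.1/live/cam4";

    VideoStackManager::Instance().startVideoStack(layout);

    // Later: swap one cell's source without stopping the composite stream.
    layout["url"][0][0] = "rtsp://127.0.0.1/live/cam5";
    VideoStackManager::Instance().resetVideoStack(layout);
}

An optional "span" field (parsed further down in parseParams()) merges a rectangular range of cells into one larger cell, e.g. for a focus view.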
CMakeLists.txt

@@ -452,6 +452,15 @@ elseif(NOT ANDROID OR IOS)
    update_cached_list(MK_LINK_LIBRARIES pthread)
endif()

if(ENABLE_VIDEOSTACK)
    if(ENABLE_FFMPEG AND ENABLE_X264)
        message(STATUS "ENABLE_VIDEOSTACK defined")
        update_cached_list(MK_COMPILE_DEFINITIONS ENABLE_VIDEOSTACK)
    else()
        message(WARNING "ENABLE_VIDEOSTACK requires ENABLE_FFMPEG and ENABLE_X264")
    endif ()
endif ()

# ----------------------------------------------------------------------------
# Solution folders:
# ----------------------------------------------------------------------------
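Because ENABLE_VIDEOSTACK is injected through MK_COMPILE_DEFINITIONS, consumers only see it as a preprocessor definition; the WebApi.cpp hunk at the end of this diff guards the VideoStack code exactly this way. A tiny illustrative sketch of that guard pattern (the function name here is hypothetical):

#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_FFMPEG) && defined(ENABLE_X264)
#include "VideoStack.h"   // placeholder include path
#endif

void shutdownStacks() {   // hypothetical helper
#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_FFMPEG) && defined(ENABLE_X264)
    // mirrors the unInstallWebApi() change below
    VideoStackManager::Instance().clear();
#endif
}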
VideoStack.cpp

@@ -18,23 +18,15 @@

// ITU-R BT.709
#define RGB_TO_Y(R, G, B) (((47 * (R) + 157 * (G) + 16 * (B) + 128) >> 8) + 16)
#define RGB_TO_U(R, G, B) (((-26 * (R) - 87 * (G) + 112 * (B) + 128) >> 8) + 128)
#define RGB_TO_V(R, G, B) (((112 * (R) - 102 * (G) - 10 * (B) + 128) >> 8) + 128)

INSTANCE_IMP(VideoStackManager)

Param::~Param() { VideoStackManager::Instance().unrefChannel(id, width, height, pixfmt); }

Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt)
    : _id(id), _width(width), _height(height), _pixfmt(pixfmt) {
    _tmp = std::make_shared<mediakit::FFmpegFrame>();

    _tmp->get()->width = _width;
@@ -53,88 +45,72 @@ Channel::Channel(const std::string& id, int width, int height, AVPixelFormat pix
    _tmp = _sws->inputFrame(frame);
}

void Channel::addParam(const std::weak_ptr<Param>& p) {
    std::lock_guard<std::recursive_mutex> lock(_mx);
    _params.push_back(p);
}

void Channel::onFrame(const mediakit::FFmpegFrame::Ptr& frame) {
    std::weak_ptr<Channel> weakSelf = shared_from_this();
    _poller = _poller ? _poller : toolkit::WorkThreadPool::Instance().getPoller();
    _poller->async([weakSelf, frame]() {
        auto self = weakSelf.lock();
        if (!self) { return; }
        self->_tmp = self->_sws->inputFrame(frame);

        self->forEachParam([self](const Param::Ptr& p) { self->fillBuffer(p); });
    });
}

void Channel::forEachParam(const std::function<void(const Param::Ptr&)>& func) {
    for (auto& wp : _params) {
        if (auto sp = wp.lock()) { func(sp); }
    }
}

void Channel::fillBuffer(const Param::Ptr& p) {
    if (auto buf = p->weak_buf.lock()) { copyData(buf, p); }
}

void Channel::copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p) {

    switch (p->pixfmt) {
        case AV_PIX_FMT_YUV420P: {
            for (int i = 0; i < p->height; i++) {
                memcpy(buf->get()->data[0] + buf->get()->linesize[0] * (i + p->posY) + p->posX,
                       _tmp->get()->data[0] + _tmp->get()->linesize[0] * i, _tmp->get()->width);
            }
            // Make sure the last row of UV data is also copied correctly when height is odd
            for (int i = 0; i < (p->height + 1) / 2; i++) {
                // U plane
                memcpy(buf->get()->data[1] + buf->get()->linesize[1] * (i + p->posY / 2) + p->posX / 2,
                       _tmp->get()->data[1] + _tmp->get()->linesize[1] * i, _tmp->get()->width / 2);

                // V plane
                memcpy(buf->get()->data[2] + buf->get()->linesize[2] * (i + p->posY / 2) + p->posX / 2,
                       _tmp->get()->data[2] + _tmp->get()->linesize[2] * i, _tmp->get()->width / 2);
            }
            break;
        }
        case AV_PIX_FMT_NV12: {
            // TODO: not implemented yet
            break;
        }

        default: WarnL << "No support pixformat: " << av_get_pix_fmt_name(p->pixfmt); break;
    }
}
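The AV_PIX_FMT_NV12 case above is still a TODO in this commit. Purely for reference (not part of the commit), a sketch of what an NV12 blit could look like with the same Param geometry; NV12 keeps Y in plane 0 and interleaved UV pairs in plane 1 at half vertical resolution:

// Hypothetical helper: copy one channel's NV12 frame (src) into the composite
// buffer (dst) at the cell described by p.
static void copyNV12(const mediakit::FFmpegFrame::Ptr& dst,
                     const mediakit::FFmpegFrame::Ptr& src, const Param::Ptr& p) {
    // Y plane: one byte per pixel
    for (int i = 0; i < p->height; i++) {
        memcpy(dst->get()->data[0] + dst->get()->linesize[0] * (i + p->posY) + p->posX,
               src->get()->data[0] + src->get()->linesize[0] * i, src->get()->width);
    }
    // Interleaved UV plane: half height, two bytes per pixel pair, so the
    // horizontal byte offset equals posX (assuming posX and posY are even)
    for (int i = 0; i < (p->height + 1) / 2; i++) {
        memcpy(dst->get()->data[1] + dst->get()->linesize[1] * (i + p->posY / 2) + p->posX,
               src->get()->data[1] + src->get()->linesize[1] * i, src->get()->width);
    }
}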
void StackPlayer::addChannel(const std::weak_ptr<Channel>& chn) {
    std::lock_guard<std::recursive_mutex> lock(_mx);
    _channels.push_back(chn);
}

void StackPlayer::play() {

    auto url = _url;
    // Create the pull-stream and decoder objects
    _player = std::make_shared<mediakit::MediaPlayer>();
    std::weak_ptr<mediakit::MediaPlayer> weakPlayer = _player;

@@ -146,13 +122,9 @@ void StackPlayer::play()
    _player->setOnPlayResult([weakPlayer, weakSelf, url](const toolkit::SockException& ex) mutable {
        TraceL << "StackPlayer: " << url << " OnPlayResult: " << ex.what();
        auto strongPlayer = weakPlayer.lock();
        if (!strongPlayer) { return; }
        auto self = weakSelf.lock();
        if (!self) { return; }

        if (!ex) {
            // Cancel the retry timer
@@ -164,19 +136,18 @@ void StackPlayer::play()
            self->rePlay(url);
        }

        auto videoTrack = std::dynamic_pointer_cast<mediakit::VideoTrack>(
            strongPlayer->getTrack(mediakit::TrackVideo, false));
        // auto audioTrack = std::dynamic_pointer_cast<mediakit::AudioTrack>(strongPlayer->getTrack(mediakit::TrackAudio, false));

        if (videoTrack) {
            // TODO: add logic to decide between GPU and CPU decoding
            // auto decoder = std::make_shared<FFmpegDecoder>(videoTrack, 1, std::vector<std::string>{ "hevc_cuvid", "h264_cuvid"});
            auto decoder = std::make_shared<mediakit::FFmpegDecoder>(
                videoTrack, 0, std::vector<std::string>{"h264", "hevc"});

            decoder->setOnDecode([weakSelf](const mediakit::FFmpegFrame::Ptr& frame) mutable {
                auto self = weakSelf.lock();
                if (!self) { return; }

                self->onFrame(frame);
            });
@@ -190,14 +161,10 @@ void StackPlayer::play()
    _player->setOnShutdown([weakPlayer, url, weakSelf](const toolkit::SockException& ex) {
        TraceL << "StackPlayer: " << url << " OnShutdown: " << ex.what();
        auto strongPlayer = weakPlayer.lock();
        if (!strongPlayer) { return; }

        auto self = weakSelf.lock();
        if (!self) { return; }

        self->onDisconnect();

@@ -207,18 +174,14 @@ void StackPlayer::play()
    _player->play(url);
}

void StackPlayer::onFrame(const mediakit::FFmpegFrame::Ptr& frame) {
    std::lock_guard<std::recursive_mutex> lock(_mx);
    for (auto& weak_chn : _channels) {
        if (auto chn = weak_chn.lock()) { chn->onFrame(frame); }
    }
}

void StackPlayer::onDisconnect() {
    std::lock_guard<std::recursive_mutex> lock(_mx);
    for (auto& weak_chn : _channels) {
        if (auto chn = weak_chn.lock()) {
@@ -228,31 +191,22 @@ void StackPlayer::onDisconnect()
    }
}

void StackPlayer::rePlay(const std::string& url) {
    _failedCount++;
    auto delay = MAX(2 * 1000, MIN(_failedCount * 3 * 1000, 60 * 1000));// stepped retry delay
    std::weak_ptr<StackPlayer> weakSelf = shared_from_this();
    _timer = std::make_shared<toolkit::Timer>(delay / 1000.0f, [weakSelf, url]() {
        auto self = weakSelf.lock();
        if (!self) {}
        WarnL << "replay [" << self->_failedCount << "]:" << url;
        self->_player->play(url);
        return false;
    }, nullptr);
}

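The reconnect logic above uses a stepped delay: 3 s per accumulated failure, clamped between 2 s and 60 s. Note that the "if (!self) {}" guard in the timer callback has an empty body; purely as an illustration (not part of the commit), a defensive variant would return early when the weak pointer has expired, assuming the usual toolkit::Timer contract where the callback's return value decides whether the timer repeats:

_timer = std::make_shared<toolkit::Timer>(delay / 1000.0f, [weakSelf, url]() {
    auto self = weakSelf.lock();
    if (!self) {
        return false;   // player already destroyed, stop retrying
    }
    WarnL << "replay [" << self->_failedCount << "]:" << url;
    self->_player->play(url);
    return false;       // one-shot: rePlay() re-arms the timer on the next failure
}, nullptr);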
VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelFormat pixfmt,
                       float fps, int bitRate)
    : _id(id), _width(width), _height(height), _pixfmt(pixfmt), _fps(fps), _bitRate(bitRate) {

    _buffer = std::make_shared<mediakit::FFmpegFrame>();

@@ -262,7 +216,8 @@ VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelForm

    av_frame_get_buffer(_buffer->get(), 32);

    _dev = std::make_shared<mediakit::DevChannel>(
        mediakit::MediaTuple{DEFAULT_VHOST, "live", _id, ""});

    mediakit::VideoInfo info;
    info.codecId = mediakit::CodecH264;
@@ -272,34 +227,28 @@ VideoStack::VideoStack(const std::string& id, int width, int height, AVPixelForm
    info.iBitRate = _bitRate;

    _dev->initVideo(info);
    // dev->initAudio(); // TODO: audio
    _dev->addTrackCompleted();

    _isExit = false;
}

VideoStack::~VideoStack() {
    _isExit = true;
    if (_thread.joinable()) { _thread.join(); }
}

void VideoStack::setParam(const Params& params) {
    if (_params) {
        for (auto& p : (*_params)) {
            if (!p) continue;
            p->weak_buf.reset();
        }
    }

    initBgColor();
    for (auto& p : (*params)) {
        if (!p) continue;
        p->weak_buf = _buffer;
        if (auto chn = p->weak_chn.lock()) {
            chn->addParam(p);
@@ -309,14 +258,14 @@ void VideoStack::setParam(const Params& params)
    _params = params;
}

void VideoStack::start() {
    _thread = std::thread([&]() {
        uint64_t pts = 0;
        int frameInterval = 1000 / _fps;
        auto lastEncTP = std::chrono::steady_clock::now();
        while (!_isExit) {
            if (std::chrono::steady_clock::now() - lastEncTP >
                std::chrono::milliseconds(frameInterval)) {
                lastEncTP = std::chrono::steady_clock::now();

                _dev->inputYUV((char**)_buffer->get()->data, _buffer->get()->linesize, pts);
@@ -326,9 +275,8 @@ void VideoStack::start()
    });
}

void VideoStack::initBgColor() {
    // Fill the background color
    auto R = 20;
    auto G = 20;
    auto B = 20;
@@ -342,27 +290,19 @@ void VideoStack::initBgColor()
    memset(_buffer->get()->data[2], V, _buffer->get()->linesize[2] * _height / 2);
}

Channel::Ptr VideoStackManager::getChannel(const std::string& id, int width, int height,
                                           AVPixelFormat pixfmt) {

    std::lock_guard<std::recursive_mutex> lock(_mx);
    auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
    auto it = _channelMap.find(key);
    if (it != _channelMap.end()) { return it->second->acquire(); }

    return createChannel(id, width, height, pixfmt);
}

void VideoStackManager::unrefChannel(const std::string& id, int width, int height,
                                     AVPixelFormat pixfmt) {

    std::lock_guard<std::recursive_mutex> lock(_mx);
    auto key = id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt);
@@ -377,8 +317,7 @@ void VideoStackManager::unrefChannel(const std::string& id,
    }
}

int VideoStackManager::startVideoStack(const Json::Value& json) {

    std::string id;
    int width, height;
@@ -392,8 +331,7 @@ int VideoStackManager::startVideoStack(const Json::Value& json)
    auto stack = std::make_shared<VideoStack>(id, width, height);

    for (auto& p : (*params)) {
        if (!p) continue;
        p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
    }

@@ -405,13 +343,13 @@ int VideoStackManager::startVideoStack(const Json::Value& json)
    return 0;
}

int VideoStackManager::resetVideoStack(const Json::Value& json) {
    std::string id;
    int width, height;
    auto params = parseParams(json, id, width, height);

    if (!params) {
        ErrorL << "Videostack parse params failed!";
        return -1;
    }

@@ -419,15 +357,12 @@ int VideoStackManager::resetVideoStack(const Json::Value& json)
    {
        std::lock_guard<std::recursive_mutex> lock(_mx);
        auto it = _stackMap.find(id);
        if (it == _stackMap.end()) { return -2; }
        stack = it->second;
    }

    for (auto& p : (*params)) {
        if (!p) continue;
        p->weak_chn = getChannel(p->id, p->width, p->height, p->pixfmt);
    }

@@ -435,8 +370,7 @@ int VideoStackManager::resetVideoStack(const Json::Value& json)
    return 0;
}

int VideoStackManager::stopVideoStack(const std::string& id) {
    std::lock_guard<std::recursive_mutex> lock(_mx);
    auto it = _stackMap.find(id);
    if (it != _stackMap.end()) {
@@ -447,93 +381,90 @@ int VideoStackManager::stopVideoStack(const std::string& id)
    return -1;
}

mediakit::FFmpegFrame::Ptr VideoStackManager::getBgImg() { return _bgImg; }

template<typename T> T getJsonValue(const Json::Value& json, const std::string& key) {
    if (!json.isMember(key)) {
        throw Json::LogicError("VideoStack parseParams missing required field: " + key);
    }
    return json[key].as<T>();
}

Params VideoStackManager::parseParams(const Json::Value& json, std::string& id, int& width,
                                      int& height) {

    id = getJsonValue<std::string>(json, "id");
    width = getJsonValue<int>(json, "width");
    height = getJsonValue<int>(json, "height");
    int rows = getJsonValue<int>(json, "row");// number of grid rows
    int cols = getJsonValue<int>(json, "col");// number of grid columns

    float gapv = json["gapv"].asFloat();// vertical gap
    float gaph = json["gaph"].asFloat();// horizontal gap

    // gap size in pixels
    int gaphPix = static_cast<int>(round(width * gaph));
    int gapvPix = static_cast<int>(round(height * gapv));

    // compute the cell width/height from the gaps
    int gridWidth = cols > 1 ? (width - gaphPix * (cols - 1)) / cols : width;
    int gridHeight = rows > 1 ? (height - gapvPix * (rows - 1)) / rows : height;

    auto params = std::make_shared<std::vector<Param::Ptr>>(rows * cols);

    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < cols; col++) {
            std::string url = json["url"][row][col].asString();

            auto param = std::make_shared<Param>();
            param->posX = gridWidth * col + col * gaphPix;
            param->posY = gridHeight * row + row * gapvPix;
            param->width = gridWidth;
            param->height = gridHeight;
            param->id = url;

            (*params)[row * cols + col] = param;
        }
    }

    // check whether cells need to be merged (focus view)
    if (json.isMember("span") && json["span"].isArray() && json["span"].size() > 0) {
        for (const auto& subArray : json["span"]) {
            if (!subArray.isArray() || subArray.size() != 2) {
                throw Json::LogicError("Incorrect 'span' sub-array format in JSON");
            }
            std::array<int, 4> mergePos;
            unsigned int index = 0;

            for (const auto& innerArray : subArray) {
                if (!innerArray.isArray() || innerArray.size() != 2) {
                    throw Json::LogicError("Incorrect 'span' inner-array format in JSON");
                }
                for (const auto& number : innerArray) {
                    if (index < mergePos.size()) { mergePos[index++] = number.asInt(); }
                }
            }

            for (int i = mergePos[0]; i <= mergePos[2]; i++) {
                for (int j = mergePos[1]; j <= mergePos[3]; j++) {
                    if (i == mergePos[0] && j == mergePos[1]) {
                        (*params)[i * cols + j]->width =
                            (mergePos[3] - mergePos[1] + 1) * gridWidth +
                            (mergePos[3] - mergePos[1]) * gapvPix;
                        (*params)[i * cols + j]->height =
                            (mergePos[2] - mergePos[0] + 1) * gridHeight +
                            (mergePos[2] - mergePos[0]) * gaphPix;
                    } else {
                        (*params)[i * cols + j] = nullptr;
                    }
                }
            }
        }
    }
    return params;
}

bool VideoStackManager::loadBgImg(const std::string& path) {
    _bgImg = std::make_shared<mediakit::FFmpegFrame>();

    _bgImg->get()->width = 1280;
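For concreteness, a worked example of the layout math above (numbers chosen for illustration, not taken from the commit):

// width = 1920, height = 1080, row = col = 2, gaph = gapv = 0.01
// gaphPix    = round(1920 * 0.01) = 19
// gapvPix    = round(1080 * 0.01) = 11
// gridWidth  = (1920 - 19 * 1) / 2 = 950
// gridHeight = (1080 - 11 * 1) / 2 = 534      (integer division)
// cell at row 1, col 1: posX = 950 * 1 + 1 * 19 = 969
//                       posY = 534 * 1 + 1 * 11 = 545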
@@ -543,21 +474,21 @@ bool VideoStackManager::loadBgImg(const std::string& path)
    av_frame_get_buffer(_bgImg->get(), 32);

    std::ifstream file(path, std::ios::binary);
    if (!file.is_open()) { return false; }

    file.read((char*)_bgImg->get()->data[0],
              _bgImg->get()->linesize[0] * _bgImg->get()->height);// Y
    file.read((char*)_bgImg->get()->data[1],
              _bgImg->get()->linesize[1] * _bgImg->get()->height / 2);// U
    file.read((char*)_bgImg->get()->data[2],
              _bgImg->get()->linesize[2] * _bgImg->get()->height / 2);// V
    return true;
}

void VideoStackManager::clear() { _stackMap.clear(); }

Channel::Ptr VideoStackManager::createChannel(const std::string& id, int width, int height,
                                              AVPixelFormat pixfmt) {

    std::lock_guard<std::recursive_mutex> lock(_mx);
    StackPlayer::Ptr player;
@@ -568,24 +499,24 @@ Channel::Ptr VideoStackManager::createChannel(const std::string& id,
        player = createPlayer(id);
    }

    auto refChn = std::make_shared<RefWrapper<Channel::Ptr>>(
        std::make_shared<Channel>(id, width, height, pixfmt));
    auto chn = refChn->acquire();
    player->addChannel(chn);

    _channelMap[id + std::to_string(width) + std::to_string(height) + std::to_string(pixfmt)] =
        refChn;
    return chn;
}

StackPlayer::Ptr VideoStackManager::createPlayer(const std::string& id) {
    std::lock_guard<std::recursive_mutex> lock(_mx);
    auto refPlayer =
        std::make_shared<RefWrapper<StackPlayer::Ptr>>(std::make_shared<StackPlayer>(id));
    _playerMap[id] = refPlayer;

    auto player = refPlayer->acquire();
    if (!id.empty()) { player->play(); }

    return player;
}
VideoStack.h

@@ -5,29 +5,23 @@
#include "Player/MediaPlayer.h"
#include "json/json.h"
#include <mutex>
template<typename T> class RefWrapper {
public:
    using Ptr = std::shared_ptr<RefWrapper<T>>;

    template<typename... Args>
    explicit RefWrapper(Args&&... args) : _rc(0), _entity(std::forward<Args>(args)...) {}

    T acquire() {
        ++_rc;
        return _entity;
    }

    bool dispose() { return --_rc <= 0; }

private:
    std::atomic<int> _rc;
    T _entity;
};

class Channel;
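A brief illustration (not from the commit) of how RefWrapper is meant to share one Channel between several stacks; the calls mirror getChannel()/createChannel() above:

// Hypothetical usage sketch of RefWrapper<Channel::Ptr>.
auto ref = std::make_shared<RefWrapper<Channel::Ptr>>(
    std::make_shared<Channel>("rtsp://127.0.0.1/live/cam1", 640, 360, AV_PIX_FMT_YUV420P));

Channel::Ptr a = ref->acquire();   // reference count becomes 1
Channel::Ptr b = ref->acquire();   // reference count becomes 2

// unrefChannel() presumably calls dispose() and erases the map entry once it
// returns true (its body is elided in this diff).
bool eraseNow = ref->dispose();    // count drops back to 1 -> false, keep the channel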
@@ -40,7 +34,7 @@ struct Param {
    int width = 0;
    int height = 0;
    AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P;
    std::string id{};

    // runtime
    std::weak_ptr<Channel> weak_chn;
@@ -52,7 +46,7 @@ struct Param {
using Params = std::shared_ptr<std::vector<Param::Ptr>>;

class Channel : public std::enable_shared_from_this<Channel> {
public:
    using Ptr = std::shared_ptr<Channel>;

    Channel(const std::string& id, int width, int height, AVPixelFormat pixfmt);
@@ -63,12 +57,12 @@ class Channel : public std::enable_shared_from_this<Channel> {

    void fillBuffer(const Param::Ptr& p);

protected:
    void forEachParam(const std::function<void(const Param::Ptr&)>& func);

    void copyData(const mediakit::FFmpegFrame::Ptr& buf, const Param::Ptr& p);

private:
    std::string _id;
    int _width;
    int _height;
@@ -84,13 +78,10 @@ class Channel : public std::enable_shared_from_this<Channel> {
};

class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
public:
    using Ptr = std::shared_ptr<StackPlayer>;

    StackPlayer(const std::string& url) : _url(url) {}

    void addChannel(const std::weak_ptr<Channel>& chn);

@@ -100,14 +91,14 @@ class StackPlayer : public std::enable_shared_from_this<StackPlayer> {

    void onDisconnect();

protected:
    void rePlay(const std::string& url);

private:
    std::string _url;
    mediakit::MediaPlayer::Ptr _player;

    // used for reconnecting after the stream drops
    toolkit::Timer::Ptr _timer;
    int _failedCount = 0;

@@ -116,15 +107,12 @@ class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
};

class VideoStack {
public:
    using Ptr = std::shared_ptr<VideoStack>;

    VideoStack(const std::string& url, int width = 1920, int height = 1080,
               AVPixelFormat pixfmt = AV_PIX_FMT_YUV420P, float fps = 25.0,
               int bitRate = 2 * 1024 * 1024);

    ~VideoStack();

@@ -132,15 +120,15 @@ class VideoStack {

    void start();

protected:
    void initBgColor();

public:
    Params _params;

    mediakit::FFmpegFrame::Ptr _buffer;

private:
    std::string _id;
    int _width;
    int _height;
@@ -156,53 +144,47 @@ class VideoStack {
};

class VideoStackManager {
public:
    // Create a stitched (composite) stream
    int startVideoStack(const Json::Value& json);

    // Stop a stitched stream
    int stopVideoStack(const std::string& id);

    // The stitched stream's configuration can be changed without interrupting it (switch what the composite view shows)
    int resetVideoStack(const Json::Value& json);

public:
    static VideoStackManager& Instance();

    Channel::Ptr getChannel(const std::string& id, int width, int height, AVPixelFormat pixfmt);

    void unrefChannel(const std::string& id, int width, int height, AVPixelFormat pixfmt);

    bool loadBgImg(const std::string& path);

    void clear();

    mediakit::FFmpegFrame::Ptr getBgImg();

protected:
    Params parseParams(const Json::Value& json, std::string& id, int& width, int& height);

protected:
    Channel::Ptr createChannel(const std::string& id, int width, int height, AVPixelFormat pixfmt);

    StackPlayer::Ptr createPlayer(const std::string& id);

private:
    mediakit::FFmpegFrame::Ptr _bgImg;

private:
    std::recursive_mutex _mx;

    std::unordered_map<std::string, VideoStack::Ptr> _stackMap;

    std::unordered_map<std::string, RefWrapper<Channel::Ptr>::Ptr> _channelMap;

    std::unordered_map<std::string, RefWrapper<StackPlayer::Ptr>::Ptr> _playerMap;
};
#endif
WebApi.cpp

@@ -8,6 +8,7 @@
 * may be found in the AUTHORS file in the root of the source tree.
 */

#include <exception>
#include <sys/stat.h>
#include <math.h>
#include <signal.h>
@@ -1950,9 +1951,29 @@ void installWebApi() {

    api_regist("/index/api/stack/start", [](API_ARGS_JSON_ASYNC) {
        CHECK_SECRET();
        int ret = 0;
        try {
            ret = VideoStackManager::Instance().startVideoStack(allArgs.args);
            val["code"] = ret;
            val["msg"] = ret ? "failed" : "success";
        } catch (const std::exception &e) {
            val["code"] = -1;
            val["msg"] = e.what();
        }
        invoker(200, headerOut, val.toStyledString());
    });

    api_regist("/index/api/stack/reset", [](API_ARGS_JSON_ASYNC) {
        CHECK_SECRET();
        int ret = 0;
        try {
            auto ret = VideoStackManager::Instance().resetVideoStack(allArgs.args);
            val["code"] = ret;
            val["msg"] = ret ? "failed" : "success";
        } catch (const std::exception &e) {
            val["code"] = -1;
            val["msg"] = e.what();
        }
        invoker(200, headerOut, val.toStyledString());
    });

@@ -1974,6 +1995,9 @@ void unInstallWebApi(){
#if defined(ENABLE_RTPPROXY)
    s_rtp_server.clear();
#endif
#if defined(ENABLE_VIDEOSTACK) && defined(ENABLE_FFMPEG) && defined(ENABLE_X264)
    VideoStackManager::Instance().clear();
#endif

    NoticeCenter::Instance().delListener(&web_api_tag);
}