update: wait until each video stream has buffered 1s of data before starting to encode (to be implemented: when an input stream's frame rate differs from the output frame rate, duplicate or drop frames accordingly)

KKEM\Administrator 2023-11-24 21:17:29 +08:00
parent 62eba8e390
commit 4f332c3d4f
2 changed files with 59 additions and 31 deletions
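
The frame-rate adaptation noted as TODO in the commit message (duplicating or dropping frames when an input stream's frame rate differs from the output rate) is not implemented in this commit. Below is a minimal sketch of one way it could work, mapping each output frame index to the nearest input frame index so that repeated indices duplicate a frame and skipped indices drop frames; all names here are hypothetical and not part of the code in this diff.

#include <cstdint>
#include <cstdio>

// For output frame n, pick the input frame whose presentation time is closest.
// Repeated indices mean a frame is duplicated; skipped indices mean frames are dropped.
static int64_t mapOutputToInput(int64_t out_index, double in_fps, double out_fps) {
    return static_cast<int64_t>(out_index * in_fps / out_fps + 0.5);
}

int main() {
    // 30 fps input rendered at 25 fps: some input frames are dropped.
    for (int64_t n = 0; n < 5; ++n) {
        std::printf("30->25  output %lld uses input %lld\n", (long long)n,
                    (long long)mapOutputToInput(n, 30.0, 25.0));
    }
    // 15 fps input rendered at 25 fps: some input frames are duplicated.
    for (int64_t n = 0; n < 5; ++n) {
        std::printf("15->25  output %lld uses input %lld\n", (long long)n,
                    (long long)mapOutputToInput(n, 15.0, 25.0));
    }
    return 0;
}

In StackPlayer::onFrame this index could decide whether an arriving frame is cached once, cached twice, or discarded before copyToBuf is called.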


@@ -113,9 +113,7 @@ void VideoStack::parseParam(const std::string &param) {
}
}
void VideoStack::copyToBuf(const FFmpegFrame::Ptr &frame, const Param &p) {
auto &buf = _buffer;
void VideoStack::copyToBuf(const std::shared_ptr<AVFrame> &buf, const FFmpegFrame::Ptr &frame, const Param &p) {
auto sws = std::make_shared<FFmpegSws>(AV_PIX_FMT_YUV420P, p.width, p.height);
@@ -203,7 +201,14 @@ void StackPlayer::init(const std::string &url) {
void StackPlayer::addStackPtr(VideoStack* that) {
//std::unique_lock<std::shared_timed_mutex> wlock(_mx);
_stacks.push_back(that);
if (!that) {
return;
}
auto it = _stacks.find(that->_stack_id);
if (it != _stacks.end()) {
return;
}
_stacks[that->_stack_id] = that;
}
void StackPlayer::delStackPtr(VideoStack *that) {
@@ -214,8 +219,8 @@ void StackPlayer::delStackPtr(VideoStack *that) {
void StackPlayer::onFrame(const FFmpegFrame::Ptr &frame) {
//std::shared_lock<std::shared_timed_mutex> rlock(_mx);
for (auto &that : _stacks) {
for (auto &vsp : _stacks) {
auto &that = vsp.second;
if (!that) {
continue;
}
@@ -227,12 +232,20 @@ void StackPlayer::onFrame(const FFmpegFrame::Ptr &frame) {
}
// TODO: frame caching and frame synchronization not yet implemented
if (std::chrono::steady_clock::now() - p.lastInputTime > std::chrono::milliseconds(20)) {
that->copyToBuf(frame, p);
p.lastInputTime = std::chrono::steady_clock::now();
p.cache.push_back(frame);
if (that->isReady.test(p.order)) {
continue;
}
if (p.cache.size() >= MAX_FRAME_SIZE) {
auto start = std::chrono::high_resolution_clock::now(); // record when this iteration started
for (int i = 0; i < MAX_FRAME_SIZE; i++) {
auto &front = p.cache.front();
that->copyToBuf(that->_buffers[i], front, p);
p.cache.pop_front();
that->isReady.set(p.order);
}
}
}
}
}
@@ -252,18 +265,23 @@ void VideoStack::init() {
// dev->initAudio(); // TODO: audio
_dev->addTrackCompleted();
_buffer.reset(av_frame_alloc(), [](AVFrame *frame_) { av_frame_free(&frame_); });
for (int i = 0; i < MAX_FRAME_SIZE; i++) {
_buffer->width = _width;
_buffer->height = _height;
_buffer->format = _pixfmt;
std::shared_ptr<AVFrame> frame(av_frame_alloc(), [](AVFrame *frame_) { av_frame_free(&frame_); });
av_frame_get_buffer(_buffer.get(), 32);
frame->width = _width;
frame->height = _height;
frame->format = _pixfmt;
av_frame_get_buffer(frame.get(), 32);
_buffers.push_back(frame);
}
// setBackground(0, 0, 0);
_isExit = false;
int i = 0;
for (auto &v : _params) {
for (auto &p : v) {
if (p.stream_id.empty()) {
@@ -277,6 +295,9 @@ void VideoStack::init() {
p.tmp->format = _pixfmt;
av_frame_get_buffer(p.tmp.get(), 32);*/
p.order = i++;
flag.set(p.order);
auto it = playerMap.find(p.stream_id);
if (it == playerMap.end()) {
@@ -302,16 +323,15 @@ void VideoStack::start() {
int64_t pts = 0, index = 0;
auto interval = milliseconds(40); // output frame interval: 40 ms
while (!_isExit) {
auto start = high_resolution_clock::now();
_dev->inputYUV((char **)_buffer->data, _buffer->linesize, pts);
if (isReady == flag) {
for (auto &buf : _buffers) {
_dev->inputYUV((char **)buf->data, buf->linesize, pts);
pts += 40;
index++;
auto end = high_resolution_clock::now();
auto duration = duration_cast<milliseconds>(end - start);
if (duration < interval) {
std::this_thread::sleep_for(interval - duration); // if this iteration took less than the interval, sleep for the remainder
}
isReady = 0;
} else {
std::this_thread::sleep_for(std::chrono::milliseconds(5));
}
}
}).detach();


@@ -14,13 +14,15 @@
#include "Common/Device.h"
#include "api/include/mk_transcode.h"
#include "json/json.h"
#include <bitset>
#include <shared_mutex>
static std::string testJson
= R"({"msg":"set_combine_source","gapv":0.002,"gaph":0.001,"width":1920,"urls":[["rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test"],["rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test"],["rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test"],["rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test"]],"id":"89","rows":4,"cols":4,"height":1080,"span":[[[0,0],[1,1]],[[2,3],[3,3]]]})";
= R"({"msg":"set_combine_source","gapv":0.002,"gaph":0.001,"width":1920,"urls":[["rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test"],["rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test"],["rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test"],["rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test"]],"id":"89","rows":4,"cols":4,"height":1080,"span":[[[0,0],[1,1]],[[2,3],[3,3]]]})";
static constexpr int MAX_FRAME_SIZE = 24;
class VideoStack : public std::enable_shared_from_this<VideoStack> {
public:
@@ -32,8 +34,10 @@ public:
std::string stream_id;
// RuntimeParam
std::chrono::steady_clock::time_point lastInputTime;
//std::chrono::steady_clock::time_point lastInputTime;
//std::shared_ptr<AVFrame> tmp; // temporary frame holding the scaled output
int order;
std::list<mediakit::FFmpegFrame::Ptr> cache;
};
VideoStack() = default;
@@ -48,7 +52,7 @@ public:
void start();
void copyToBuf(const mediakit::FFmpegFrame::Ptr &frame, const Param &p);
void copyToBuf(const std::shared_ptr<AVFrame> &buf, const mediakit::FFmpegFrame::Ptr &frame, const Param &p);
public:
std::string _stack_id;
@@ -65,7 +69,10 @@ public:
mediakit::DevChannel::Ptr _dev;
std::shared_ptr<AVFrame> _buffer;
std::vector<std::shared_ptr<AVFrame>> _buffers;
std::bitset<1024> isReady;
std::bitset<1024> flag;
};
@@ -85,7 +92,8 @@ private:
std::string _url;
//std::shared_timed_mutex _mx;
std::vector<VideoStack*> _stacks; // which VideoStack objects this player pushes frames to
//std::vector<VideoStack*> _stacks; // which VideoStack objects this player pushes frames to
std::unordered_map<std::string, VideoStack *> _stacks;
mediakit::MediaPlayer::Ptr _player;
};