/*
 * test_video_stack.h
 * Video-stack (grid composition) test for ZLMediaKit: pulls several rtsp
 * streams and composes them into a single pushed output picture.
 */
#pragma once
#include "Common/config.h"
#include "Player/PlayerProxy.h"
#include "Rtsp/UDPServer.h"
#include "Thread/WorkThreadPool.h"
#include "Util/CMD.h"
#include "Util/logger.h"
#include "Util/onceToken.h"
#include <atomic>
#include <iostream>
#include <signal.h>
#include "Codec/Transcode.h"
#include "Common/Device.h"
#include "api/include/mk_transcode.h"
#include "json/json.h"
#include <bitset>
#include <shared_mutex>
// Sample "set_combine_source" request, used as the default argument of
// VideoStack::parseParam(): a 4x4 grid (rows=4, cols=4) of the same rtsp
// test stream composed into a 1920x1080 output, with gap ratios gapv/gaph
// and a "span" entry merging a rectangular region of cells.
static std::string testJson
= R"({"msg":"set_combine_source","gapv":0.002,"gaph":0.001,"width":1920,"urls":[["rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test"],["rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test"],["rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test"],["rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test"]],"id":"89","rows":4,"cols":4,"height":1080,"span":[[[0,0],[1,1]],[[2,3],[3,3]]]})";
// Upper bound used for per-stream frame buffering. NOTE(review): the name
// suggests a byte size, but 24 looks like a frame *count* limit for
// Param::cache — confirm against the implementation (.cpp) and consider
// renaming to MAX_CACHED_FRAMES.
static constexpr int MAX_FRAME_SIZE = 24;
class VideoStack : public std::enable_shared_from_this<VideoStack> {
public:
    // Layout / runtime parameters for one input stream tile inside the
    // composed output picture.
    struct Param {
        int posX = 0;   // left offset of the tile in the output frame (pixels); brace-init to avoid indeterminate reads
        int posY = 0;   // top offset of the tile (pixels)
        int width = 0;  // tile width (pixels)
        int height = 0; // tile height (pixels)
        std::string url; // source stream url for this tile

        // Runtime-only state:
        // bool isDisconnected = false; // TODO: mark a stream as offline so it can get special handling
        // std::shared_ptr<AVFrame> tmp; // scratch frame holding the scaled picture

        int order = 0; // sequence number of this valid stream; was uninitialized
        std::list<mediakit::FFmpegFrame::Ptr> cache; // buffered decoded frames for this tile
    };

    VideoStack() = default;
    // NOTE(review): _isExit is a plain bool, not std::atomic — confirm any
    // thread reading it tolerates a non-synchronized write on teardown.
    ~VideoStack() { _isExit = true; }

    // Parse the JSON request and store the layout into _params.
    void parseParam(const std::string &param = testJson);

    // Create the push (DevChannel) object.
    void init();
    void start();

    // Copy one decoded frame into the composed output buffer at position p.
    void copyToBuf(const std::shared_ptr<AVFrame> &buf, const mediakit::FFmpegFrame::Ptr &frame, const Param &p);

public:
    std::string _stack_id;
    int _width = 0;  // composed output width; was uninitialized
    int _height = 0; // composed output height; was uninitialized
    AVPixelFormat _pixfmt = AV_PIX_FMT_YUV420P;
    float _fps = 25.0;
    int _bitRate = 2 * 1024 * 1024;
    bool _isExit = false; // was uninitialized -> indeterminate first read
    std::vector<VideoStack::Param> _params; // parsed tile parameters

    mediakit::DevChannel::Ptr _dev;
    std::vector<std::shared_ptr<AVFrame>> _buffers;
    // These two bitsets are used to check whether the cached frames of
    // every stream required by this stack are ready.
    std::bitset<1024> isReady;
    std::bitset<1024> flag;
    // FFmpegFrame::Ptr DisconnPic; // TODO: load a prepared picture to show while a stream is offline
};
class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
public:
using Ptr = std::shared_ptr<StackPlayer>;
void play(const std::string &url);
2023-11-23 17:01:21 +08:00
void addStackPtr(VideoStack *that);
void delStackPtr(VideoStack *that);
void onFrame(const mediakit::FFmpegFrame::Ptr &frame);
void syncFrameByFps(const mediakit::FFmpegFrame::Ptr& frame, VideoStack::Param& p, float target_fps);
void syncFrameByPts(const mediakit::FFmpegFrame::Ptr& frame, VideoStack::Param& p, float target_fps);
2023-11-23 17:01:21 +08:00
private:
std::string _url;
float fps;
mediakit::FFmpegFrame::Ptr lastFrame;
float diff = 0;
2023-11-23 17:01:21 +08:00
//std::shared_timed_mutex _mx;
std::unordered_map<std::string, VideoStack *> _stacks; // 需要给哪些Stack对象推送帧数据
2023-11-23 17:01:21 +08:00
mediakit::MediaPlayer::Ptr _player;
};
// Global registry: one shared StackPlayer per url so each stream is pulled
// only once. NOTE(review): `static` at namespace scope in a header gives
// every translation unit that includes this file its own copy of mx and
// playerMap — presumably this header is included by a single test TU;
// otherwise the registry would not actually be shared. Confirm (C++17
// `inline` would make them真 single-instance). mx presumably guards
// playerMap — verify against the implementation.
static std::mutex mx;
static std::unordered_map<std::string, StackPlayer::Ptr> playerMap;