// ZLMediaKit/tests/test_video_stack.h
#pragma once
#include "Common/config.h"
#include "Player/PlayerProxy.h"
#include "Rtsp/UDPServer.h"
#include "Thread/WorkThreadPool.h"
#include "Util/CMD.h"
#include "Util/logger.h"
#include "Util/onceToken.h"
#include <atomic>
#include <iostream>
#include <signal.h>
#include "Codec/Transcode.h"
#include "Common/Device.h"
#include "api/include/mk_transcode.h"
#include "json/json.h"
#include <shared_mutex>
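
// Sample config consumed by parseParam(): id "89", a 4x4 grid (rows/cols) rendered
// onto a 1920x1080 canvas, every cell pulling the same RTSP url, with small
// horizontal/vertical gaps (gaph/gapv) and a "span" list that appears to merge
// ranges of grid cells into larger tiles.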
static std::string testJson
= R"({"msg":"set_combine_source","gapv":0.002,"gaph":0.001,"width":1920,"urls":[["rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test"],["rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test"],["rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test"],["rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test","rtsp://47.243.129.22:1554/live/test"]],"id":"89","rows":4,"cols":4,"height":1080,"span":[[[0,0],[1,1]],[[2,3],[3,3]]]})";

class VideoStack : public std::enable_shared_from_this<VideoStack> {
public:
    // Layout and runtime state of a single tile in the stacked picture
    struct Param {
        int posX;
        int posY;
        int width;
        int height;
        std::string stream_id;
        // Runtime state
        std::chrono::steady_clock::time_point lastInputTime;
        // std::shared_ptr<AVFrame> tmp; // temporary storage for the scaled frame
    };

    VideoStack() = default;
    ~VideoStack() { _isExit = true; }

    // Parse the JSON config and store the tile layout into _params
    void parseParam(const std::string &param = testJson);

    // Create the push-stream object (DevChannel)
    void init();

    void start();

    // Copy a decoded frame into _buffer at the tile position/size described by p
    void copyToBuf(const mediakit::FFmpegFrame::Ptr &frame, const Param &p);

public:
    std::string _stack_id;
    int _width;
    int _height;
    AVPixelFormat _pixfmt = AV_PIX_FMT_YUV420P;
    float _fps = 25.0;
    int _bitRate = 2 * 1024 * 1024;
    bool _isExit = false;
    std::vector<std::vector<VideoStack::Param>> _params; // tile layout parsed from the config
    mediakit::DevChannel::Ptr _dev;
    std::shared_ptr<AVFrame> _buffer; // composite output frame (_width x _height, _pixfmt)
};
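
// Rough usage sketch (assumed from the declarations above, not a documented API):
//   auto stack = std::make_shared<VideoStack>();
//   stack->parseParam(json);  // fill _params / _width / _height from the config
//   stack->init();            // create the DevChannel and the output buffer
//   stack->start();           // start pulling the sources and pushing the stacked frame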

// Pulls one upstream url and fans decoded frames out to every registered VideoStack.
class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
public:
    using Ptr = std::shared_ptr<StackPlayer>;

    // Create the MediaPlayer and start pulling the given url
    void init(const std::string &url);

    // Register / unregister a VideoStack that should receive this player's frames
    void addStackPtr(VideoStack *that);
    void delStackPtr(VideoStack *that);

    // Called for each decoded frame; forwards it to the registered stacks
    void onFrame(const mediakit::FFmpegFrame::Ptr &frame);

private:
    std::string _url;
    // std::shared_timed_mutex _mx;
    std::vector<VideoStack *> _stacks; // VideoStack objects that frames are pushed to
    mediakit::MediaPlayer::Ptr _player;
};
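
// Global registry shared by the test: one StackPlayer per upstream url, with mx
// guarding access to playerMap, so several VideoStack instances can reuse a
// single pulled stream instead of opening the same url repeatedly.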
static std::mutex mx;
static std::unordered_map<std::string, StackPlayer::Ptr> playerMap;