mirror of
https://github.com/ZLMediaKit/ZLMediaKit.git
synced 2024-11-27 13:49:01 +08:00
113 lines
3.5 KiB
C++
113 lines
3.5 KiB
C++
#pragma once
|
||
|
||
#include "Common/config.h"
|
||
#include "Player/PlayerProxy.h"
|
||
#include "Rtsp/UDPServer.h"
|
||
#include "Thread/WorkThreadPool.h"
|
||
#include "Util/CMD.h"
|
||
#include "Util/logger.h"
|
||
#include "Util/onceToken.h"
|
||
#include <atomic>
|
||
#include <iostream>
|
||
#include <signal.h>
|
||
#include "Codec/Transcode.h"
|
||
#include "Common/Device.h"
|
||
#include "api/include/mk_transcode.h"
|
||
#include "json/json.h"
|
||
#include <bitset>
|
||
#include <shared_mutex>
|
||
|
||
|
||
// Sample layout description used as the default argument of
// VideoStack::parseParam(): a 4x4 grid (rows/cols) on a 1920x1080 canvas with
// per-tile gaps (gapv/gaph), one source url per cell, and `span` entries that
// merge rectangular cell ranges.
// Made const: this is reference data and must never be mutated (also lets the
// compiler place it in read-only storage).
static const std::string testJson
    = R"({"msg":"set_combine_source","gapv":0.002,"gaph":0.001,"width":1920,"urls":[["rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test"],["rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test"],["rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test"],["rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test","rtsp://kkem.me:1554/live/test"]],"id":"89","rows":4,"cols":4,"height":1080,"span":[[[0,0],[1,1]],[[2,3],[3,3]]]})";
|
||
|
||
|
||
// Upper bound on buffered frames — presumably caps Param::cache growth in the
// implementation (the name suggests a frame count, not bytes); TODO confirm
// against the .cpp.
static constexpr int MAX_FRAME_SIZE = 24;
|
||
|
||
class VideoStack : public std::enable_shared_from_this<VideoStack> {
|
||
public:
|
||
struct Param {
|
||
int posX;
|
||
int posY;
|
||
int width;
|
||
int height;
|
||
std::string url;
|
||
|
||
//运行时需要用到的参数
|
||
//bool isDisconnected = false; //TODO: 用于标识是否断线,对于断线的做特殊处理
|
||
//std::shared_ptr<AVFrame> tmp; // 临时存储缩放后的frame
|
||
int order; //标识有效流的序号
|
||
std::list<mediakit::FFmpegFrame::Ptr> cache;
|
||
};
|
||
|
||
VideoStack() = default;
|
||
~VideoStack() { _isExit = true; }
|
||
|
||
|
||
// 解析参数 存储到_param中
|
||
void parseParam(const std::string ¶m = testJson);
|
||
|
||
// 创建推流对象
|
||
void init();
|
||
|
||
void start();
|
||
|
||
//实现拼接
|
||
void copyToBuf(const std::shared_ptr<AVFrame> &buf, const mediakit::FFmpegFrame::Ptr &frame, const Param &p);
|
||
|
||
public:
|
||
std::string _stack_id;
|
||
|
||
int _width;
|
||
int _height;
|
||
AVPixelFormat _pixfmt = AV_PIX_FMT_YUV420P;
|
||
float _fps = 25.0;
|
||
int _bitRate = 2 * 1024 * 1024;
|
||
|
||
bool _isExit;
|
||
|
||
std::vector<VideoStack::Param> _params; // 存储参数
|
||
|
||
mediakit::DevChannel::Ptr _dev;
|
||
|
||
std::vector<std::shared_ptr<AVFrame>> _buffers;
|
||
|
||
//这两个bit位 用于判断该拼接所需的视频流的缓存帧是否都以就绪
|
||
std::bitset<1024> isReady;
|
||
std::bitset<1024> flag;
|
||
|
||
//FFmpegFrame::Ptr DisconnPic; //TODO: 读取一张准备好的图片,作为断线时的frame
|
||
};
|
||
|
||
|
||
class StackPlayer : public std::enable_shared_from_this<StackPlayer> {
|
||
public:
|
||
using Ptr = std::shared_ptr<StackPlayer>;
|
||
|
||
void play(const std::string &url);
|
||
|
||
void addStackPtr(VideoStack *that);
|
||
|
||
void delStackPtr(VideoStack *that);
|
||
|
||
void onFrame(const mediakit::FFmpegFrame::Ptr &frame);
|
||
|
||
void syncFrameByFps(const mediakit::FFmpegFrame::Ptr& frame, VideoStack::Param& p, float target_fps);
|
||
void syncFrameByPts(const mediakit::FFmpegFrame::Ptr& frame, VideoStack::Param& p, float target_fps);
|
||
private:
|
||
std::string _url;
|
||
|
||
float fps;
|
||
mediakit::FFmpegFrame::Ptr lastFrame;
|
||
float diff = 0;
|
||
//std::shared_timed_mutex _mx;
|
||
std::unordered_map<std::string, VideoStack *> _stacks; // 需要给哪些Stack对象推送帧数据
|
||
|
||
mediakit::MediaPlayer::Ptr _player;
|
||
};
|
||
|
||
|
||
|
||
// NOTE(review): `static` at namespace scope in a header gives every
// translation unit that includes this file its own private copy of the mutex
// and the map — if more than one .cpp includes this header, the player
// registry is no longer shared. Consider C++17 `inline` variables or moving
// the definitions into a .cpp. TODO confirm how many TUs include this header.
static std::mutex mx;

// Global registry of one shared StackPlayer per stream url — presumably
// guarded by mx; confirm against the .cpp.
static std::unordered_map<std::string, StackPlayer::Ptr> playerMap;
|