ZLMediaKit/src/Common/Device.cpp

/*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include "Device.h"
#include "Util/logger.h"
#include "Util/base64.h"
#include "Extension/AAC.h"
#include "Extension/G711.h"
#include "Extension/H264.h"
#include "Extension/H265.h"
using namespace toolkit;
namespace mediakit {
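
//DevChannel pushes media from a device/SDK callback into MultiMediaSourceMuxer,
//which serves whichever protocols were enabled in the constructor (rtsp/rtmp/hls/mp4).
//Raw YUV/PCM input is only available when the optional x264/FAAC encoders are compiled in;
//otherwise callers feed pre-encoded H.264/H.265/AAC/G.711 frames.
//
//Illustrative usage sketch (assumes Device.h supplies default values for the trailing
//constructor arguments; adjust to the actual header):
//  auto channel = std::make_shared<DevChannel>(DEFAULT_VHOST, "live", "camera");
//  VideoInfo v;
//  v.codecId = CodecH264;
//  v.iWidth = 1920; v.iHeight = 1080; v.iFrameRate = 25;
//  channel->initVideo(v);
//  channel->inputH264(nal_ptr, nal_len, dts_ms, pts_ms);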
DevChannel::DevChannel(const string &vhost,
const string &app,
const string &stream_id,
float duration,
bool enable_rtsp,
bool enable_rtmp,
bool enable_hls,
bool enable_mp4) :
MultiMediaSourceMuxer(vhost, app, stream_id, duration, enable_rtsp, enable_rtmp, enable_hls, enable_mp4) {}
DevChannel::~DevChannel() {}
#ifdef ENABLE_X264
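//Encode planar YUV frames with the bundled x264 wrapper and forward the resulting
//H.264 NAL units through inputH264(). initVideo() must have been called first so that
//width/height/fps are available.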
void DevChannel::inputYUV(char* apcYuv[3], int aiYuvLen[3], uint32_t uiStamp) {
//TimeTicker1(50);
if (!_pH264Enc) {
_pH264Enc.reset(new H264Encoder());
if (!_pH264Enc->init(_video->iWidth, _video->iHeight, _video->iFrameRate)) {
_pH264Enc.reset();
WarnL << "H264Encoder init failed!";
}
}
if (_pH264Enc) {
H264Encoder::H264Frame *pOut;
int iFrames = _pH264Enc->inputData(apcYuv, aiYuvLen, uiStamp, &pOut);
for (int i = 0; i < iFrames; i++) {
inputH264((char *) pOut[i].pucData, pOut[i].iLength, uiStamp);
}
}
}
#endif //ENABLE_X264
#ifdef ENABLE_FAAC
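//Encode raw PCM with the bundled FAAC wrapper and forward the AAC output through inputAAC().
//initAudio() must have been called first so that sample rate/channels/sample size are available.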
void DevChannel::inputPCM(char* pcData, int iDataLen, uint32_t uiStamp) {
if (!_pAacEnc) {
_pAacEnc.reset(new AACEncoder());
if (!_pAacEnc->init(_audio->iSampleRate, _audio->iChannel, _audio->iSampleBit)) {
_pAacEnc.reset();
WarnL << "AACEncoder init failed!";
}
}
if (_pAacEnc) {
unsigned char *pucOut;
int iRet = _pAacEnc->inputData(pcData, iDataLen, &pucOut);
if (iRet > 0) {
inputAAC((char *) pucOut + 7, iRet, uiStamp, (char *) pucOut);
}
}
}
#endif //ENABLE_FAAC
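
//Push one H.264 NAL unit, with or without an Annex-B start code.
//A dts of 0 falls back to the elapsed time of the internal ticker; a pts of 0 falls back to dts.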
void DevChannel::inputH264(const char *data, int len, uint32_t dts, uint32_t pts) {
if(dts == 0){
dts = (uint32_t)_aTicker[0].elapsedTime();
}
if(pts == 0){
pts = dts;
}
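//Detect and strip any existing Annex-B start code so a uniform 4-byte one can be prepended below.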
int prefixeSize;
if (memcmp("\x00\x00\x00\x01", data, 4) == 0) {
prefixeSize = 4;
} else if (memcmp("\x00\x00\x01", data, 3) == 0) {
prefixeSize = 3;
} else {
prefixeSize = 0;
}

//Because rtmp/hls/mp4 need to cache frames that share the same timestamp,
//using a FrameNoCacheAble frame would actually incur multiple memory copies when it is converted to FrameCacheAble.
//Copy the data once here instead, which is cheaper.
H264Frame::Ptr frame = std::make_shared<H264Frame>();
frame->_dts = dts;
frame->_pts = pts;
frame->_buffer.assign("\x00\x00\x00\x01",4);
frame->_buffer.append(data + prefixeSize, len - prefixeSize);
frame->_prefix_size = 4;
inputFrame(frame);
}
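
//Push one H.265 NAL unit; the start-code handling and timestamp fallbacks mirror inputH264().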
void DevChannel::inputH265(const char *data, int len, uint32_t dts, uint32_t pts) {
if(dts == 0){
dts = (uint32_t)_aTicker[0].elapsedTime();
}
if(pts == 0){
pts = dts;
}
int prefixeSize;
if (memcmp("\x00\x00\x00\x01", data, 4) == 0) {
prefixeSize = 4;
} else if (memcmp("\x00\x00\x01", data, 3) == 0) {
prefixeSize = 3;
} else {
prefixeSize = 0;
}

//Because rtmp/hls/mp4 need to cache frames that share the same timestamp,
//using a FrameNoCacheAble frame would actually incur multiple memory copies when it is converted to FrameCacheAble.
//Copy the data once here instead, which is cheaper.
H265Frame::Ptr frame = std::make_shared<H265Frame>();
frame->_dts = dts;
frame->_pts = pts;
frame->_buffer.assign("\x00\x00\x00\x01",4);
frame->_buffer.append(data + prefixeSize, len - prefixeSize);
frame->_prefix_size = 4;
inputFrame(frame);
}
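
//AAC frame wrapper that owns its buffer (allocated with new[] in inputAAC below),
//so it is safe for the muxers to cache it.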
class AACFrameCacheAble : public AACFrameNoCacheAble{
public:
template <typename ... ARGS>
AACFrameCacheAble(ARGS && ...args) : AACFrameNoCacheAble(std::forward<ARGS>(args)...){};
virtual ~AACFrameCacheAble() {
delete [] _ptr;
};
bool cacheAble() const override {
return true;
}
};
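
//Push one AAC frame. data_without_adts points at the raw AAC payload; the 7-byte ADTS header
//is supplied separately. If the header is not immediately followed by the payload in memory,
//both are copied into a single owned buffer so the frame can be cached safely.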
void DevChannel::inputAAC(const char *data_without_adts, int len, uint32_t dts, const char *adts_header){
if(dts == 0){
dts = (uint32_t)_aTicker[1].elapsedTime();
}
if(adts_header){
if(adts_header + 7 == data_without_adts){
//the ADTS header and the payload are contiguous in memory
inputFrame(std::make_shared<AACFrameNoCacheAble>((char *)data_without_adts - 7, len + 7, dts, 0, 7));
}else{
//the ADTS header and the payload are in separate buffers
char *dataWithAdts = new char[len + 7];
memcpy(dataWithAdts, adts_header, 7);
memcpy(dataWithAdts + 7 , data_without_adts , len);
inputFrame(std::make_shared<AACFrameCacheAble>(dataWithAdts, len + 7, dts, 0, 7));
}
}
}
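
//Push one G.711 frame (A-law or µ-law, according to the codec declared in initAudio()).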
void DevChannel::inputG711(const char *data, int len, uint32_t dts){
if (dts == 0) {
dts = (uint32_t)_aTicker[1].elapsedTime();
}
inputFrame(std::make_shared<G711FrameNoCacheAble>(_audio->codecId, (char*)data, len, dts, 0));
}
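
//Declare the video track before pushing video frames; only H.264 and H.265 are handled here.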
void DevChannel::initVideo(const VideoInfo &info) {
_video = std::make_shared<VideoInfo>(info);
switch (info.codecId){
case CodecH265 : addTrack(std::make_shared<H265Track>()); break;
case CodecH264 : addTrack(std::make_shared<H264Track>()); break;
default: WarnL << "Unsupported video codec: " << info.codecId; break;
}
}
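
//Declare the audio track before pushing audio frames; AAC, G.711 A-law and G.711 µ-law are handled here.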
void DevChannel::initAudio(const AudioInfo &info) {
_audio = std::make_shared<AudioInfo>(info);
switch (info.codecId) {
case CodecAAC : addTrack(std::make_shared<AACTrack>()); break;
case CodecG711A :
case CodecG711U : addTrack(std::make_shared<G711Track>(info.codecId, info.iSampleRate, info.iChannel, info.iSampleBit)); break;
default: WarnL << "Unsupported audio codec: " << info.codecId; break;
}
}
} /* namespace mediakit */