Add dts generation algorithm to support rtsp push streams that contain B-frames

xiongziliang 2020-01-14 10:04:24 +08:00
parent b55db11de3
commit 9fa6e9d8d9
14 changed files with 187 additions and 111 deletions

View File

@@ -106,11 +106,11 @@ void DevChannel::inputH264(const char* pcData, int iDataLen, uint32_t dts,uint32
     }
     H264Frame::Ptr frame = std::make_shared<H264Frame>();
-    frame->timeStamp = dts;
-    frame->ptsStamp = pts;
-    frame->buffer.assign("\x00\x00\x00\x01",4);
-    frame->buffer.append(pcData + prefixeSize, iDataLen - prefixeSize);
-    frame->iPrefixSize = 4;
+    frame->_dts = dts;
+    frame->_pts = pts;
+    frame->_buffer.assign("\x00\x00\x00\x01",4);
+    frame->_buffer.append(pcData + prefixeSize, iDataLen - prefixeSize);
+    frame->_prefix_size = 4;
     inputFrame(frame);
 }
@@ -131,11 +131,11 @@ void DevChannel::inputH265(const char* pcData, int iDataLen, uint32_t dts,uint32
     }
     H265Frame::Ptr frame = std::make_shared<H265Frame>();
-    frame->timeStamp = dts;
-    frame->ptsStamp = pts;
-    frame->buffer.assign("\x00\x00\x00\x01",4);
-    frame->buffer.append(pcData + prefixeSize, iDataLen - prefixeSize);
-    frame->iPrefixSize = 4;
+    frame->_dts = dts;
+    frame->_pts = pts;
+    frame->_buffer.assign("\x00\x00\x00\x01",4);
+    frame->_buffer.append(pcData + prefixeSize, iDataLen - prefixeSize);
+    frame->_prefix_size = 4;
     inputFrame(frame);
 }

View File

@@ -42,13 +42,14 @@ int64_t DeltaStamp::deltaStamp(int64_t stamp) {
     }
     int64_t ret = stamp - _last_stamp;
-    if(ABS(ret) < MAX_DELTA_STAMP){
-        //timestamp change is small
+    if(ret >= 0){
+        //positive timestamp delta, return it
         _last_stamp = stamp;
-        return ret;
+        //for live streams the timestamp delta must not exceed MAX_DELTA_STAMP
+        return ret < MAX_DELTA_STAMP ? ret : (_playback ? ret : 0);
     }
-    //timestamp changed too much: probably a wrap-around or a seek
+    //negative timestamp delta: the timestamp wrapped around or went backwards
     _last_stamp = stamp;
     return _playback ? ret : 0;
 }
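For readability, the new branch can be restated as a standalone rule. The sketch below is an illustrative paraphrase, not project code: playback stands in for the _playback member and max_delta for the MAX_DELTA_STAMP constant defined elsewhere in the source.

#include <cstdint>

// Illustrative paraphrase of the new delta rule (hypothetical helper, not part of the commit):
// ret is the raw stamp delta, playback mirrors _playback, max_delta mirrors MAX_DELTA_STAMP.
int64_t delta_rule(int64_t ret, bool playback, int64_t max_delta) {
    if (ret >= 0) {
        // positive delta: passed through, but abnormal forward jumps are clamped to 0 for live streams
        return ret < max_delta ? ret : (playback ? ret : 0);
    }
    // negative delta: wrap-around or rollback, suppressed for live streams
    return playback ? ret : 0;
}

Compared with the old ABS(ret) < MAX_DELTA_STAMP check, a forward jump beyond the threshold is now passed through only in playback mode, and a backward jump is likewise returned only in playback mode.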
@@ -99,4 +100,52 @@ int64_t Stamp::getRelativeStamp() const {
 }
 
+bool DtsGenerator::getDts(uint32_t pts, uint32_t &dts){
+    bool ret = false;
+    if(pts == _last_pts){
+        //pts unchanged, return the previous result
+        if(_last_dts){
+            dts = _last_dts;
+            ret = true;
+        }
+        return ret;
+    }
+
+    ret = getDts_l(pts,dts);
+    if(ret){
+        //save this result
+        _last_dts = dts;
+    }
+    //remember the last pts
+    _last_pts = pts;
+    return ret;
+}
+
+bool DtsGenerator::getDts_l(uint32_t pts, uint32_t &dts){
+    if(pts > _last_max_pts){
+        if(!_sorter_max_size && _frames_since_last_max_pts && _count_sorter_max_size++ > 0){
+            _sorter_max_size = _frames_since_last_max_pts;
+            _dts_pts_offset = (pts - _last_max_pts) / 2;
+            InfoL << _sorter_max_size << " " << _dts_pts_offset;
+        }
+        _frames_since_last_max_pts = 0;
+        _last_max_pts = pts;
+    }
+
+    _pts_sorter.emplace(pts);
+    ++_frames_since_last_max_pts;
+
+    if(_sorter_max_size && _pts_sorter.size() > _sorter_max_size){
+        auto it = _pts_sorter.begin();
+        dts = *it + _dts_pts_offset;
+        if(dts > pts){
+            //dts must not exceed pts (this branch is almost never reached)
+            dts = pts;
+        }
+        _pts_sorter.erase(it);
+        return true;
+    }
+    return false;
+}
+
 }//namespace mediakit
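A minimal usage sketch of the new DtsGenerator (not part of this commit): feed it presentation timestamps in decode order, as they arrive for a stream with B-frames, and read back the derived decode timestamps. The include path and the 40 ms frame spacing are assumptions for illustration; getDts() returns false for the first few frames while the reorder depth is still being measured.

#include "Common/Stamp.h" // assumed repository include path
#include <cstdio>

int main() {
    using namespace mediakit;
    DtsGenerator generator;
    // decode-order pts of an I P B B ... pattern, 40 ms per frame (display order is reordered by the B-frames)
    uint32_t pts_list[] = {40, 160, 80, 120, 280, 200, 240, 400, 320, 360};
    for (uint32_t pts : pts_list) {
        uint32_t dts = 0;
        if (generator.getDts(pts, dts)) {
            printf("pts=%u -> dts=%u\n", pts, dts);
        } else {
            // not emitted yet: the generator is still learning how far the B-frames reorder the stream
            printf("pts=%u -> dts not available yet\n", pts);
        }
    }
    return 0;
}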

View File

@@ -27,8 +27,9 @@
 #ifndef ZLMEDIAKIT_STAMP_H
 #define ZLMEDIAKIT_STAMP_H
 
-#include "Util/TimeTicker.h"
+#include <set>
 #include <cstdint>
+#include "Util/TimeTicker.h"
 using namespace toolkit;
 
 namespace mediakit {
@@ -88,6 +89,27 @@
     SmoothTicker _ticker;
 };
 
+class DtsGenerator{
+public:
+    DtsGenerator() = default;
+    ~DtsGenerator() = default;
+    bool getDts(uint32_t pts, uint32_t &dts);
+private:
+    bool getDts_l(uint32_t pts, uint32_t &dts);
+private:
+    uint32_t _dts_pts_offset = 0;
+    uint32_t _last_dts = 0;
+    uint32_t _last_pts = 0;
+    uint32_t _last_max_pts = 0;
+    int _frames_since_last_max_pts = 0;
+    int _sorter_max_size = 0;
+    int _count_sorter_max_size = 0;
+    set<uint32_t> _pts_sorter;
+};
+
 }//namespace mediakit
 
 #endif //ZLMEDIAKIT_STAMP_H

View File

@@ -102,7 +102,7 @@ void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
     rtmpPkt->bodySize = rtmpPkt->strBuf.size();
     rtmpPkt->chunkId = CHUNK_AUDIO;
     rtmpPkt->streamId = STREAM_MEDIA;
-    rtmpPkt->timeStamp = frame->stamp();
+    rtmpPkt->timeStamp = frame->dts();
     rtmpPkt->typeId = MSG_AUDIO;
     RtmpCodec::inputRtmp(rtmpPkt, false);
 }

View File

@@ -41,7 +41,7 @@ AACRtpEncoder::AACRtpEncoder(uint32_t ui32Ssrc,
 void AACRtpEncoder::inputFrame(const Frame::Ptr &frame) {
     GET_CONFIG(uint32_t, cycleMS, Rtp::kCycleMS);
-    auto uiStamp = frame->stamp();
+    auto uiStamp = frame->dts();
     auto pcData = frame->data() + frame->prefixSize();
     auto iLen = frame->size() - frame->prefixSize();

View File

@@ -81,13 +81,6 @@ class Frame : public Buffer, public CodecInfo {
 public:
     typedef std::shared_ptr<Frame> Ptr;
     virtual ~Frame(){}
-
-    /**
-     * use dts() / pts() instead
-     */
-    inline uint32_t stamp() const {
-        return dts();
-    };
 
     /**
      *

View File

@@ -53,21 +53,21 @@
     } NalType;
 
     char *data() const override{
-        return (char *)buffer.data();
+        return (char *)_buffer.data();
     }
     uint32_t size() const override {
-        return buffer.size();
+        return _buffer.size();
     }
     uint32_t dts() const override {
-        return timeStamp;
+        return _dts;
     }
     uint32_t pts() const override {
-        return ptsStamp ? ptsStamp : timeStamp;
+        return _pts ? _pts : _dts;
     }
     uint32_t prefixSize() const override{
-        return iPrefixSize;
+        return _prefix_size;
     }
     TrackType getTrackType() const override{
@@ -79,11 +79,11 @@
     }
     bool keyFrame() const override {
-        return H264_TYPE(buffer[iPrefixSize]) == H264Frame::NAL_IDR;
+        return H264_TYPE(_buffer[_prefix_size]) == H264Frame::NAL_IDR;
     }
     bool configFrame() const override{
-        switch(H264_TYPE(buffer[iPrefixSize]) ){
+        switch(H264_TYPE(_buffer[_prefix_size]) ){
             case H264Frame::NAL_SPS:
             case H264Frame::NAL_PPS:
                 return true;
@@ -92,10 +92,10 @@
         }
     }
 public:
-    uint32_t timeStamp;
-    uint32_t ptsStamp = 0;
-    string buffer;
-    uint32_t iPrefixSize = 4;
+    uint32_t _dts = 0;
+    uint32_t _pts = 0;
+    uint32_t _prefix_size = 4;
+    string _buffer;
 };
@@ -340,19 +340,19 @@
     if(!_sps.empty()){
         auto spsFrame = std::make_shared<H264Frame>();
-        spsFrame->iPrefixSize = 4;
-        spsFrame->buffer.assign("\x0\x0\x0\x1",4);
-        spsFrame->buffer.append(_sps);
-        spsFrame->timeStamp = frame->stamp();
+        spsFrame->_prefix_size = 4;
+        spsFrame->_buffer.assign("\x0\x0\x0\x1",4);
+        spsFrame->_buffer.append(_sps);
+        spsFrame->_dts = frame->dts();
         VideoTrack::inputFrame(spsFrame);
     }
     if(!_pps.empty()){
         auto ppsFrame = std::make_shared<H264Frame>();
-        ppsFrame->iPrefixSize = 4;
-        ppsFrame->buffer.assign("\x0\x0\x0\x1",4);
-        ppsFrame->buffer.append(_pps);
-        ppsFrame->timeStamp = frame->stamp();
+        ppsFrame->_prefix_size = 4;
+        ppsFrame->_buffer.assign("\x0\x0\x0\x1",4);
+        ppsFrame->_buffer.append(_pps);
+        ppsFrame->_dts = frame->dts();
         VideoTrack::inputFrame(ppsFrame);
     }
 }

View File

@@ -35,8 +35,8 @@ H264RtmpDecoder::H264RtmpDecoder() {
 H264Frame::Ptr H264RtmpDecoder::obtainFrame() {
     //obtain a new object from the pool to avoid overwriting one already written into the ring buffer
     auto frame = obtainObj();
-    frame->buffer.clear();
-    frame->iPrefixSize = 4;
+    frame->_buffer.clear();
+    frame->_prefix_size = 4;
     return frame;
 }
@@ -78,10 +78,10 @@ bool H264RtmpDecoder::decodeRtmp(const RtmpPacket::Ptr &pkt) {
 inline void H264RtmpDecoder::onGetH264(const char* pcData, int iLen, uint32_t dts,uint32_t pts) {
 #if 1
-    _h264frame->timeStamp = dts;
-    _h264frame->ptsStamp = pts;
-    _h264frame->buffer.assign("\x0\x0\x0\x1", 4); //add the H.264 start code
-    _h264frame->buffer.append(pcData, iLen);
+    _h264frame->_dts = dts;
+    _h264frame->_pts = pts;
+    _h264frame->_buffer.assign("\x0\x0\x0\x1", 4); //add the H.264 start code
+    _h264frame->_buffer.append(pcData, iLen);
 
     //write to the ring buffer
     RtmpCodec::inputFrame(_h264frame);
@@ -144,7 +144,7 @@ void H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
         return;
     }
-    if(_lastPacket && _lastPacket->timeStamp != frame->stamp()) {
+    if(_lastPacket && _lastPacket->timeStamp != frame->dts()) {
         RtmpCodec::inputRtmp(_lastPacket, _lastPacket->isVideoKeyFrame());
         _lastPacket = nullptr;
     }
@@ -165,7 +165,7 @@ void H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
         _lastPacket->chunkId = CHUNK_VIDEO;
         _lastPacket->streamId = STREAM_MEDIA;
-        _lastPacket->timeStamp = frame->stamp();
+        _lastPacket->timeStamp = frame->dts();
         _lastPacket->typeId = MSG_VIDEO;
     }

View File

@@ -70,8 +70,8 @@ H264RtpDecoder::H264RtpDecoder() {
 H264Frame::Ptr H264RtpDecoder::obtainFrame() {
     //obtain a new object from the pool to avoid overwriting one already written into the ring buffer
     auto frame = ResourcePoolHelper<H264Frame>::obtainObj();
-    frame->buffer.clear();
-    frame->iPrefixSize = 4;
+    frame->_buffer.clear();
+    frame->_prefix_size = 4;
     return frame;
 }
@@ -113,9 +113,9 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
     if (nal.type >= 0 && nal.type < 24) {
         //a full frame
-        _h264frame->buffer.assign("\x0\x0\x0\x1", 4);
-        _h264frame->buffer.append((char *)frame, length);
-        _h264frame->timeStamp = rtppack->timeStamp;
+        _h264frame->_buffer.assign("\x0\x0\x0\x1", 4);
+        _h264frame->_buffer.append((char *)frame, length);
+        _h264frame->_pts = rtppack->timeStamp;
         auto key = _h264frame->keyFrame();
         onGetH264(_h264frame);
         return (key); //i frame
@@ -142,9 +142,9 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
         //discard frames that are too small
         NALU nal;
         MakeNalu(ptr[0], nal);
-        _h264frame->buffer.assign("\x0\x0\x0\x1", 4);
-        _h264frame->buffer.append((char *)ptr, len);
-        _h264frame->timeStamp = rtppack->timeStamp;
+        _h264frame->_buffer.assign("\x0\x0\x0\x1", 4);
+        _h264frame->_buffer.append((char *)ptr, len);
+        _h264frame->_pts = rtppack->timeStamp;
         if(nal.type == H264Frame::NAL_IDR){
             haveIDR = true;
         }
@@ -162,10 +162,10 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
     if (fu.S) {
         //first rtp packet of this frame, FU-A start
         char tmp = (nal.forbidden_zero_bit << 7 | nal.nal_ref_idc << 5 | fu.type);
-        _h264frame->buffer.assign("\x0\x0\x0\x1", 4);
-        _h264frame->buffer.push_back(tmp);
-        _h264frame->buffer.append((char *)frame + 2, length - 2);
-        _h264frame->timeStamp = rtppack->timeStamp;
+        _h264frame->_buffer.assign("\x0\x0\x0\x1", 4);
+        _h264frame->_buffer.push_back(tmp);
+        _h264frame->_buffer.append((char *)frame + 2, length - 2);
+        _h264frame->_pts = rtppack->timeStamp;
         //save the current sequence before returning so the next packet can be checked for continuity
         _lastSeq = rtppack->sequence;
         return _h264frame->keyFrame();
@@ -173,22 +173,22 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
     if (rtppack->sequence != _lastSeq + 1 && rtppack->sequence != 0) {
         //middle or tail rtp packets must have consecutive seq numbers (a wrap-around counts as consecutive); otherwise rtp packets were lost and the incomplete frame must be dropped
-        _h264frame->buffer.clear();
+        _h264frame->_buffer.clear();
         WarnL << "rtp sequence不连续: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃";
         return false;
     }
 
     if (!fu.E) {
         //middle rtp packet of this frame, FU-A mid
-        _h264frame->buffer.append((char *)frame + 2, length - 2);
+        _h264frame->_buffer.append((char *)frame + 2, length - 2);
         //save the current sequence before returning so the next packet can be checked for continuity
         _lastSeq = rtppack->sequence;
         return false;
     }
 
     //last rtp packet of this frame, FU-A end
-    _h264frame->buffer.append((char *)frame + 2, length - 2);
-    _h264frame->timeStamp = rtppack->timeStamp;
+    _h264frame->_buffer.append((char *)frame + 2, length - 2);
+    _h264frame->_pts = rtppack->timeStamp;
     auto key = _h264frame->keyFrame();
     onGetH264(_h264frame);
     return key;
@@ -209,8 +209,12 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
 }
 
 void H264RtpDecoder::onGetH264(const H264Frame::Ptr &frame) {
-    //write to the ring buffer
-    RtpCodec::inputFrame(frame);
+    //derive dts from pts
+    auto flag = _dts_generator.getDts(frame->_pts,frame->_dts);
+    if(flag){
+        //write to the ring buffer
+        RtpCodec::inputFrame(frame);
+    }
     _h264frame = obtainFrame();
 }
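The decoder now attaches a derived dts to each frame before handing it to RtpCodec::inputFrame, because RTP timestamps only carry presentation time and B-frames arrive in decode order. A small self-check sketch under the same assumptions as the earlier example (assumed repository include path, illustrative 40 ms pts values) verifies the two properties this relies on: the derived dts never exceeds the frame's pts (the implementation clamps it), and for a regular B-frame pattern it does not decrease.

#include "Common/Stamp.h" // assumed repository include path
#include <cassert>
#include <vector>

int main() {
    using namespace mediakit;
    DtsGenerator gen;
    // decode-order pts of repeated "P B B" groups, 40 ms per frame
    std::vector<uint32_t> pts_list = {120, 40, 80, 240, 160, 200, 360, 280, 320, 480, 400, 440};
    uint32_t last_dts = 0;
    for (auto pts : pts_list) {
        uint32_t dts = 0;
        if (gen.getDts(pts, dts)) {
            assert(dts <= pts);       // a frame is never presented before it is decoded
            assert(dts >= last_dts);  // the decode timeline does not go backwards
            last_dts = dts;
        }
        // frames for which getDts() returns false are simply not forwarded, mirroring the decoder change above
    }
    return 0;
}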
@@ -232,7 +236,7 @@ H264RtpEncoder::H264RtpEncoder(uint32_t ui32Ssrc,
 void H264RtpEncoder::inputFrame(const Frame::Ptr &frame) {
     GET_CONFIG(uint32_t,cycleMS,Rtp::kCycleMS);
     auto pcData = frame->data() + frame->prefixSize();
-    auto uiStamp = frame->stamp();
+    auto uiStamp = frame->pts();
     auto iLen = frame->size() - frame->prefixSize();
     //get the NALU's 5-bit frame type
     unsigned char naluType = H264_TYPE(pcData[0]);

View File

@@ -30,6 +30,7 @@
 #include "Rtsp/RtpCodec.h"
 #include "Util/ResourcePool.h"
 #include "Extension/H264.h"
+#include "Common/Stamp.h"
 using namespace toolkit;
 
 namespace mediakit{
@@ -66,6 +67,7 @@
     H264Frame::Ptr obtainFrame();
 private:
     H264Frame::Ptr _h264frame;
+    DtsGenerator _dts_generator;
     int _lastSeq = 0;
 };

View File

@@ -74,23 +74,23 @@
     } NaleType;
 
     char *data() const override {
-        return (char *) buffer.data();
+        return (char *) _buffer.data();
     }
     uint32_t size() const override {
-        return buffer.size();
+        return _buffer.size();
     }
     uint32_t dts() const override {
-        return timeStamp;
+        return _dts;
     }
     uint32_t pts() const override {
-        return ptsStamp ? ptsStamp : timeStamp;
+        return _pts ? _pts : _dts;
     }
     uint32_t prefixSize() const override {
-        return iPrefixSize;
+        return _prefix_size;
     }
     TrackType getTrackType() const override {
@@ -102,11 +102,11 @@
     }
     bool keyFrame() const override {
-        return isKeyFrame(H265_TYPE(buffer[iPrefixSize]));
+        return isKeyFrame(H265_TYPE(_buffer[_prefix_size]));
     }
     bool configFrame() const override{
-        switch(H265_TYPE(buffer[iPrefixSize])){
+        switch(H265_TYPE(_buffer[_prefix_size])){
            case H265Frame::NAL_VPS:
            case H265Frame::NAL_SPS:
            case H265Frame::NAL_PPS:
@@ -131,10 +131,10 @@
     }
 public:
-    uint32_t timeStamp;
-    uint32_t ptsStamp = 0;
-    string buffer;
-    uint32_t iPrefixSize = 4;
+    uint32_t _dts = 0;
+    uint32_t _pts = 0;
+    uint32_t _prefix_size = 4;
+    string _buffer;
 };
@@ -356,27 +356,27 @@
     }
     if(!_vps.empty()){
         auto vpsFrame = std::make_shared<H265Frame>();
-        vpsFrame->iPrefixSize = 4;
-        vpsFrame->buffer.assign("\x0\x0\x0\x1", 4);
-        vpsFrame->buffer.append(_vps);
-        vpsFrame->timeStamp = frame->stamp();
+        vpsFrame->_prefix_size = 4;
+        vpsFrame->_buffer.assign("\x0\x0\x0\x1", 4);
+        vpsFrame->_buffer.append(_vps);
+        vpsFrame->_dts = frame->dts();
         VideoTrack::inputFrame(vpsFrame);
     }
     if (!_sps.empty()) {
         auto spsFrame = std::make_shared<H265Frame>();
-        spsFrame->iPrefixSize = 4;
-        spsFrame->buffer.assign("\x0\x0\x0\x1", 4);
-        spsFrame->buffer.append(_sps);
-        spsFrame->timeStamp = frame->stamp();
+        spsFrame->_prefix_size = 4;
+        spsFrame->_buffer.assign("\x0\x0\x0\x1", 4);
+        spsFrame->_buffer.append(_sps);
+        spsFrame->_dts = frame->dts();
         VideoTrack::inputFrame(spsFrame);
     }
     if (!_pps.empty()) {
         auto ppsFrame = std::make_shared<H265Frame>();
-        ppsFrame->iPrefixSize = 4;
-        ppsFrame->buffer.assign("\x0\x0\x0\x1", 4);
-        ppsFrame->buffer.append(_pps);
-        ppsFrame->timeStamp = frame->stamp();
+        ppsFrame->_prefix_size = 4;
+        ppsFrame->_buffer.assign("\x0\x0\x0\x1", 4);
+        ppsFrame->_buffer.append(_pps);
+        ppsFrame->_dts = frame->dts();
         VideoTrack::inputFrame(ppsFrame);
     }
 }

View File

@@ -70,8 +70,8 @@ H265RtpDecoder::H265RtpDecoder() {
 H265Frame::Ptr H265RtpDecoder::obtainFrame() {
     //obtain a new object from the pool to avoid overwriting one already written into the ring buffer
     auto frame = ResourcePoolHelper<H265Frame>::obtainObj();
-    frame->buffer.clear();
-    frame->iPrefixSize = 4;
+    frame->_buffer.clear();
+    frame->_prefix_size = 4;
     return frame;
 }
@@ -99,11 +99,11 @@ bool H265RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
     MakeFU(frame[2], fu);
     if (fu.S) {
         //first rtp packet of this frame
-        _h265frame->buffer.assign("\x0\x0\x0\x1", 4);
-        _h265frame->buffer.push_back(fu.type << 1);
-        _h265frame->buffer.push_back(0x01);
-        _h265frame->buffer.append((char *) frame + 3, length - 3);
-        _h265frame->timeStamp = rtppack->timeStamp;
+        _h265frame->_buffer.assign("\x0\x0\x0\x1", 4);
+        _h265frame->_buffer.push_back(fu.type << 1);
+        _h265frame->_buffer.push_back(0x01);
+        _h265frame->_buffer.append((char *) frame + 3, length - 3);
+        _h265frame->_pts = rtppack->timeStamp;
         //save the current sequence before returning so the next packet can be checked for continuity
         _lastSeq = rtppack->sequence;
         return (_h265frame->keyFrame()); //i frame
@@ -111,22 +111,22 @@ bool H265RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
     if (rtppack->sequence != _lastSeq + 1 && rtppack->sequence != 0) {
         //middle or tail rtp packets must have consecutive seq numbers (a wrap-around counts as consecutive); otherwise rtp packets were lost and the incomplete frame must be dropped
-        _h265frame->buffer.clear();
+        _h265frame->_buffer.clear();
         WarnL << "rtp sequence不连续: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃";
         return false;
     }
 
     if (!fu.E) {
         //middle rtp packet of this frame
-        _h265frame->buffer.append((char *) frame + 3, length - 3);
+        _h265frame->_buffer.append((char *) frame + 3, length - 3);
         //save the current sequence before returning so the next packet can be checked for continuity
         _lastSeq = rtppack->sequence;
         return false;
     }
 
     //last rtp packet of this frame
-    _h265frame->buffer.append((char *) frame + 3, length - 3);
-    _h265frame->timeStamp = rtppack->timeStamp;
+    _h265frame->_buffer.append((char *) frame + 3, length - 3);
+    _h265frame->_pts = rtppack->timeStamp;
     auto key = _h265frame->keyFrame();
     onGetH265(_h265frame);
     return key;
@@ -134,9 +134,9 @@ bool H265RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
     default: // 4.4.1. Single NAL Unit Packets (p24)
         //a full frame
-        _h265frame->buffer.assign("\x0\x0\x0\x1", 4);
-        _h265frame->buffer.append((char *)frame, length);
-        _h265frame->timeStamp = rtppack->timeStamp;
+        _h265frame->_buffer.assign("\x0\x0\x0\x1", 4);
+        _h265frame->_buffer.append((char *)frame, length);
+        _h265frame->_pts = rtppack->timeStamp;
         auto key = _h265frame->keyFrame();
         onGetH265(_h265frame);
         return key;
@@ -144,8 +144,12 @@ bool H265RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
 }
 
 void H265RtpDecoder::onGetH265(const H265Frame::Ptr &frame) {
-    //write to the ring buffer
-    RtpCodec::inputFrame(frame);
+    //compute dts
+    auto flag = _dts_generator.getDts(frame->_pts,frame->_dts);
+    if(flag){
+        //write to the ring buffer
+        RtpCodec::inputFrame(frame);
+    }
     _h265frame = obtainFrame();
 }
@@ -167,7 +171,7 @@ H265RtpEncoder::H265RtpEncoder(uint32_t ui32Ssrc,
 void H265RtpEncoder::inputFrame(const Frame::Ptr &frame) {
     GET_CONFIG(uint32_t,cycleMS,Rtp::kCycleMS);
     uint8_t *pcData = (uint8_t*)frame->data() + frame->prefixSize();
-    auto uiStamp = frame->stamp();
+    auto uiStamp = frame->pts();
     auto iLen = frame->size() - frame->prefixSize();
     unsigned char naluType = H265_TYPE(pcData[0]); //get the NALU's 5-bit frame type
     uiStamp %= cycleMS;

View File

@@ -30,6 +30,7 @@
 #include "Rtsp/RtpCodec.h"
 #include "Util/ResourcePool.h"
 #include "Extension/H265.h"
+#include "Common/Stamp.h"
 using namespace toolkit;
@@ -67,6 +68,7 @@
     H265Frame::Ptr obtainFrame();
 private:
     H265Frame::Ptr _h265frame;
+    DtsGenerator _dts_generator;
     int _lastSeq = 0;
 };

View File

@@ -219,7 +219,7 @@
     virtual ~MuteAudioMaker(){}
 
     void inputFrame(const Frame::Ptr &frame) override {
         if(frame->getTrackType() == TrackVideo){
-            auto iAudioIndex = frame->stamp() / MUTE_ADTS_DATA_MS;
+            auto iAudioIndex = frame->dts() / MUTE_ADTS_DATA_MS;
             if(_iAudioIndex != iAudioIndex){
                 _iAudioIndex = iAudioIndex;
                 auto aacFrame = std::make_shared<AACFrameCacheAble>((char *)MUTE_ADTS_DATA, MUTE_ADTS_DATA_LEN, _iAudioIndex * MUTE_ADTS_DATA_MS);