Compare commits

...

8 Commits

Author SHA1 Message Date
alex
dec0a7c4eb
Merge 90bd28249e into 06ff8461c7 2024-11-26 04:59:08 +00:00
Xiaofeng Wang
06ff8461c7
Reduce the scope affected by dependencies (#4039)
See #4034
2024-11-25 11:22:29 +08:00
a839419160
326b475beb
Add a pcap parsing tool (#4033 #4034)
2024-11-22 17:43:15 +08:00
xiongguangjie
8868320301
Add xr targetbitrate process and update js demo sdk (#4031)
rtc: add parsing of RTCP XR target bitrate, following
https://webrtc.googlesource.com/src/+/refs/heads/main/modules/rtp_rtcp/source/rtcp_packet/target_bitrate.cc

Update zlmrtcclient.js to fix failure when pushing a screen-share stream
2024-11-21 14:17:02 +08:00
ss002012
226b87a633
Improve RTSP NTP timestamps to prevent audio/video desynchronization (#4017 #4018)
The RTP timestamp may be truncated; when generating the NTP timestamp, prefer the untruncated original timestamp to keep audio and video in sync
2024-11-18 14:08:53 +08:00
alex
90bd28249e Translate comments in src/Common/MediaSink.cpp 2024-09-21 09:44:57 +08:00
alex
c3106d514a Translate comments in src/Common/MediaSink.cpp 2024-09-21 09:26:48 +08:00
alex
f03d414e82 Translate comments in src/Common/MediaSink.cpp 2024-09-21 09:26:17 +08:00
12 changed files with 602 additions and 24 deletions

cmake/FindPCAP.cmake (new file, +23)

@@ -0,0 +1,23 @@
# - Try to find libpcap
#
# Once done this will define
# PCAP_FOUND - System has libpcap
# PCAP_INCLUDE_DIRS - The libpcap include directories
# PCAP_LIBRARIES - The libpcap library
# Find libpcap
FIND_PATH(
PCAP_INCLUDE_DIRS
NAMES pcap.h
)
FIND_LIBRARY(
PCAP_LIBRARIES
NAMES pcap
)
message(STATUS "PCAP LIBRARIES: " ${PCAP_LIBRARIES})
message(STATUS "PCAP INCLUDE DIRS: " ${PCAP_INCLUDE_DIRS})
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(PCAP DEFAULT_MSG PCAP_LIBRARIES PCAP_INCLUDE_DIRS)
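For orientation, here is a minimal sketch of the kind of consumer this find module enables once PCAP_INCLUDE_DIRS and PCAP_LIBRARIES are set: a small C++ program that opens a capture file offline and counts its packets. The file-name handling is illustrative and not part of this PR; only the libpcap calls themselves (pcap_open_offline, pcap_next, pcap_close) are real API.

// Hypothetical example, not part of this PR: build it against ${PCAP_LIBRARIES}.
#include <pcap.h>
#include <cstdio>

int main(int argc, char *argv[]) {
    if (argc != 2) {
        std::fprintf(stderr, "usage: %s <capture.pcap>\n", argv[0]);
        return 1;
    }
    char errbuf[PCAP_ERRBUF_SIZE] = {0};
    pcap_t *handle = pcap_open_offline(argv[1], errbuf);
    if (!handle) {
        std::fprintf(stderr, "open failed: %s\n", errbuf);
        return 1;
    }
    struct pcap_pkthdr header;
    size_t count = 0;
    // pcap_next() returns nullptr at end of file (or on error).
    while (pcap_next(handle, &header) != nullptr) {
        ++count;
    }
    std::printf("%zu packets\n", count);
    pcap_close(handle);
    return 0;
}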


@@ -114,12 +114,14 @@ void MediaSink::checkTrackIfReady() {
         }
     }

-    // 等待音频超时时间
+    // 等待音频超时时间 [AUTO-TRANSLATED:5ec16b26]
+    // Wait for audio timeout time
     GET_CONFIG(uint32_t, kWaitAudioTrackDataMS, General::kWaitAudioTrackDataMS);
     if (_max_track_size > 1) {
         for (auto it = _track_map.begin(); it != _track_map.end();) {
             if (it->second.first->getTrackType() == TrackAudio && _ticker.elapsedTime() > kWaitAudioTrackDataMS && !it->second.second) {
-                // 音频超时且完全没收到音频数据,忽略音频
+                // 音频超时且完全没收到音频数据,忽略音频 [AUTO-TRANSLATED:0d0fbb13]
+                // Audio timeout and did not receive any audio data, ignore audio
                 auto index = it->second.first->getIndex();
                 WarnL << "Audio track index " << index << " codec " << it->second.first->getCodecName() << " receive no data for long "
                       << _ticker.elapsedTime() << "ms. Ignore it!";


@@ -209,8 +209,13 @@ void RtcpHeader::net2Host(size_t len) {
             RtcpXRDLRR *dlrr = (RtcpXRDLRR *)this;
             dlrr->net2Host(len);
             TraceL << dlrr->dumpString();
+        } else if (xr->bt == 42) {
+            // Seen when a browser pushes a screen-share stream to the server; no practical use has been found yet, so parse it but do nothing further with it
+            RtcpXRTargetBitrate *tb = (RtcpXRTargetBitrate *)this;
+            tb->net2Host(len);
+            //TraceL << tb->dumpString();
         } else {
-            throw std::runtime_error(StrPrinter << "rtcp xr bt " << xr->bt << " not support");
+            throw std::runtime_error(StrPrinter << "rtcp xr bt " << (int)xr->bt << " not support");
         }
         break;
     }
@@ -796,6 +801,71 @@ std::shared_ptr<RtcpXRDLRR> RtcpXRDLRR::create(size_t item_count) {
    return std::shared_ptr<RtcpXRDLRR>(ptr, [](RtcpXRDLRR *ptr) { delete[](char *) ptr; });
}

////////////////////////////////////

string RtcpXRTargetBitrateItem::dumpString() const {
    _StrPrinter printer;
    printer << "Spatial Layer :" << spatial_layer << "\r\n";
    printer << "Temporal Layer :" << temporal_layer << "\r\n";
    printer << "Target Bitrate: " << target_bitrate << "\r\n";
    return std::move(printer);
}

void RtcpXRTargetBitrateItem::net2Host() {
    target_bitrate = ntohl(target_bitrate) >> 8;
}

std::vector<RtcpXRTargetBitrateItem *> RtcpXRTargetBitrate::getItemList() {
    auto count = block_length;
    RtcpXRTargetBitrateItem *ptr = &items;
    vector<RtcpXRTargetBitrateItem *> ret;
    for (int i = 0; i < (int)count; ++i) {
        ret.emplace_back(ptr);
        ++ptr;
    }
    return ret;
}

string RtcpXRTargetBitrate::dumpString() const {
    _StrPrinter printer;
    printer << RtcpHeader::dumpHeader();
    printer << "ssrc :" << ssrc << "\r\n";
    printer << "bt :" << (int)bt << "\r\n";
    printer << "block_length : " << block_length << "\r\n";
    auto items_list = ((RtcpXRTargetBitrate *)this)->getItemList();
    auto i = 0;
    for (auto &item : items_list) {
        printer << "---- item:" << i++ << " ----\r\n";
        printer << item->dumpString();
    }
    return std::move(printer);
}

void RtcpXRTargetBitrate::net2Host(size_t size) {
    static const size_t kMinSize = sizeof(RtcpHeader);
    CHECK_MIN_SIZE(size, kMinSize);
    ssrc = ntohl(ssrc);
    block_length = ntohs(block_length);
    auto count = block_length;
    for (int i = 0; i < (int)count; ++i) {
        RtcpXRTargetBitrateItem *ptr = &items;
        ptr->net2Host();
        ptr++;
    }
}

std::shared_ptr<RtcpXRTargetBitrate> RtcpXRTargetBitrate::create(size_t item_count) {
    auto real_size = sizeof(RtcpXRTargetBitrate) - sizeof(RtcpXRTargetBitrateItem) + item_count * sizeof(RtcpXRTargetBitrateItem);
    auto bytes = alignSize(real_size);
    auto ptr = (RtcpXRTargetBitrate *)new char[bytes];
    setupHeader(ptr, RtcpType::RTCP_XR, 0, bytes);
    setupPadding(ptr, bytes - real_size);
    return std::shared_ptr<RtcpXRTargetBitrate>(ptr, [](RtcpXRTargetBitrate *ptr) { delete[](char *) ptr; });
}

#if 0
#include "Util/onceToken.h"


@ -772,7 +772,6 @@ private:
/** /**
* *
* @param size
*/ */
void net2Host(); void net2Host();
}; };
@ -814,6 +813,105 @@ private:
}; };
// RFC 4585: Feedback format.
//
// Common packet format:
//
// 0 1 2 3
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | BT=42 | reserved | block length |
// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// Target bitrate item (repeat as many times as necessary).
//
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | S | T | Target Bitrate |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// : ... :
//
// Spatial Layer (S): 4 bits
// Indicates which spatial layer this bitrate concerns.
//
// Temporal Layer (T): 4 bits
// Indicates which temporal layer this bitrate concerns.
//
// Target Bitrate: 24 bits
// The encoder target bitrate for this layer, in kbps.
//
// As an example of how S and T are intended to be used, VP8 simulcast will
// use a separate TargetBitrate message per stream, since they are transmitted
// on separate SSRCs, with temporal layers grouped by stream.
// If VP9 SVC is used, there will be only one SSRC, so each spatial and
// temporal layer combo used shall be specified in the TargetBitrate packet.
class RtcpXRTargetBitrateItem {
public:
    friend class RtcpXRTargetBitrate;

#if __BYTE_ORDER == __BIG_ENDIAN
    // Indicates which spatial layer this bitrate concerns.
    uint32_t spatial_layer : 4;
    // Indicates which temporal layer this bitrate concerns.
    uint32_t temporal_layer : 4;
#else
    // Indicates which temporal layer this bitrate concerns.
    uint32_t temporal_layer : 4;
    // Indicates which spatial layer this bitrate concerns.
    uint32_t spatial_layer : 4;
#endif
    // The encoder target bitrate for this layer, in kbps.
    uint32_t target_bitrate : 24;

private:
    /**
     * Dump field details as a string.
     * Only usable after net2Host() has converted the fields to host byte order.
     */
    std::string dumpString() const;

    /**
     * Convert from network byte order to host byte order.
     */
    void net2Host();
};
class RtcpXRTargetBitrate : public RtcpHeader {
public:
    friend class RtcpHeader;

    uint32_t ssrc;
    uint8_t bt;
    uint8_t reserved;
    uint16_t block_length;
    RtcpXRTargetBitrateItem items;

    /**
     * Create an RtcpXRTargetBitrate packet; only the RtcpHeader part is initialized.
     * @param item_count number of RtcpXRTargetBitrateItem entries
     * @return RtcpXRTargetBitrate packet
     */
    static std::shared_ptr<RtcpXRTargetBitrate> create(size_t item_count);

    /**
     * Get the list of RtcpXRTargetBitrateItem pointers.
     * Only usable after net2Host() has converted the packet to host byte order.
     */
    std::vector<RtcpXRTargetBitrateItem *> getItemList();

private:
    /**
     * Dump field details as a string.
     * Only usable after net2Host() has converted the packet to host byte order.
     */
    std::string dumpString() const;

    /**
     * Convert from network byte order to host byte order.
     * @param size total packet size in bytes
     */
    void net2Host(size_t size);
};
#pragma pack(pop)

} // namespace mediakit
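To make the wire layout above concrete, the following standalone sketch (not part of the patch, and deliberately independent of the RtcpXRTargetBitrate classes, whose net2Host()/dumpString() helpers are private) unpacks one 4-byte target bitrate item with plain shifts:

#include <cstdint>
#include <cstdio>

// Layout per the diagram: | S (4 bits) | T (4 bits) | Target Bitrate (24 bits, kbps) |
struct TargetBitrateItemView {
    unsigned spatial_layer;
    unsigned temporal_layer;
    unsigned long target_bitrate_kbps;
};

static TargetBitrateItemView parse_item(const uint8_t *p) {
    TargetBitrateItemView item;
    item.spatial_layer = p[0] >> 4;    // high nibble of the first byte
    item.temporal_layer = p[0] & 0x0F; // low nibble of the first byte
    // 24-bit big-endian bitrate in the remaining three bytes
    item.target_bitrate_kbps = (static_cast<unsigned long>(p[1]) << 16) | (p[2] << 8) | p[3];
    return item;
}

int main() {
    // Example bytes: S=0, T=1, target bitrate 0x0007D0 = 2000 kbps
    const uint8_t wire[4] = {0x01, 0x00, 0x07, 0xD0};
    TargetBitrateItemView item = parse_item(wire);
    std::printf("S=%u T=%u bitrate=%lu kbps\n", item.spatial_layer, item.temporal_layer, item.target_bitrate_kbps);
    return 0;
}

This only illustrates the field layout; the committed code reaches the same fields through bitfields plus ntohl() instead of explicit shifts.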


@@ -23,11 +23,10 @@ void RtspMuxer::onRtp(RtpPacket::Ptr in, bool is_key) {
     if (ref.rtp_stamp != in->getHeader()->stamp) {
         // rtp时间戳变化才计算ntp节省cpu资源 [AUTO-TRANSLATED:729d54f2]
         // Only calculate NTP when the RTP timestamp changes, saving CPU resources
-        int64_t stamp_ms = in->getStamp() * uint64_t(1000) / in->sample_rate;
         int64_t stamp_ms_inc;
         // 求rtp时间戳增量 [AUTO-TRANSLATED:f6ba022f]
         // Get the RTP timestamp increment
-        ref.stamp.revise(stamp_ms, stamp_ms, stamp_ms_inc, stamp_ms_inc);
+        ref.stamp.revise(in->ntp_stamp, in->ntp_stamp, stamp_ms_inc, stamp_ms_inc);
         ref.rtp_stamp = in->getHeader()->stamp;
         ref.ntp_stamp = stamp_ms_inc + _ntp_stamp_start;
     }
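The motivation, per the commit message: a millisecond stamp derived from the 32-bit RTP header timestamp wraps around, and after a wrap the derived value jumps back towards zero, desynchronizing audio and video, while the 64-bit ntp_stamp carried on the packet does not. A rough standalone illustration (the 90 kHz clock is an assumed typical video rate, not a value taken from the patch):

#include <cstdint>
#include <cstdio>

int main() {
    const uint64_t sample_rate = 90000; // assumed typical video RTP clock
    // A 32-bit RTP timestamp wraps after 2^32 ticks.
    const uint64_t wrap_seconds = (uint64_t(1) << 32) / sample_rate;
    std::printf("32-bit RTP timestamp wraps every %llu s (about %.1f hours)\n",
                static_cast<unsigned long long>(wrap_seconds), wrap_seconds / 3600.0);

    // Just after the wrap, stamp * 1000 / sample_rate collapses back towards 0 ms.
    uint32_t wrapped_rtp_stamp = 100;
    std::printf("ms derived right after the wrap: %llu\n",
                static_cast<unsigned long long>(uint64_t(wrapped_rtp_stamp) * 1000 / sample_rate));
    return 0;
}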


@@ -23,9 +23,18 @@
 aux_source_directory(. TEST_SRC_LIST)

+find_package(PCAP QUIET)
+
 foreach(TEST_SRC ${TEST_SRC_LIST})
     get_filename_component(TEST_EXE_NAME ${TEST_SRC} NAME_WE)

+    if(NOT PCAP_FOUND)
+        message(WARNING "PCAP not found")
+        if("${TEST_EXE_NAME}" MATCHES "test_rtp_pcap")
+            continue()
+        endif()
+    endif()
+
     if(NOT TARGET ZLMediaKit::WebRTC)
         # WebRTC
         if("${TEST_EXE_NAME}" MATCHES "test_rtcp_nack")

@@ -46,8 +55,14 @@ foreach(TEST_SRC ${TEST_SRC_LIST})
     endif()

     if(CMAKE_SYSTEM_NAME MATCHES "Linux")
         target_link_libraries(${TEST_EXE_NAME} -Wl,--start-group ${MK_LINK_LIBRARIES} -Wl,--end-group)
     else()
         target_link_libraries(${TEST_EXE_NAME} ${MK_LINK_LIBRARIES})
     endif()
 endforeach()

+if(TARGET test_rtp_pcap)
+    target_include_directories(test_rtp_pcap SYSTEM PRIVATE ${PCAP_INCLUDE_DIRS})
+    target_link_libraries(test_rtp_pcap ${PCAP_LIBRARIES})
+endif()

tests/test_ps.cpp (new file, +119)

@@ -0,0 +1,119 @@
#include "Common/config.h"
#include "Http/HttpSession.h"
#include "Network/TcpServer.h"
#include "Rtmp/RtmpSession.h"
#include "Rtp/Decoder.h"
#include "Rtp/RtpProcess.h"
#include "Rtsp/RtspSession.h"
#include "Util/File.h"
#include "Util/MD5.h"
#include "Util/SSLBox.h"
#include "Util/logger.h"
#include "Util/util.h"
#include <iostream>
#include <map>
using namespace std;
using namespace toolkit;
using namespace mediakit;
static semaphore sem;
class PsProcess : public MediaSinkInterface, public std::enable_shared_from_this<PsProcess> {
public:
using Ptr = std::shared_ptr<PsProcess>;
PsProcess() {
MediaTuple media_info;
media_info.vhost = DEFAULT_VHOST;
media_info.app = "rtp";
media_info.stream = "000001";
_muxer = std::make_shared<MultiMediaSourceMuxer>(media_info, 0.0f, ProtocolOption());
}
~PsProcess() {
}
bool inputFrame(const Frame::Ptr &frame) override {
if (_muxer) {
_muxer->inputFrame(frame);
int64_t diff = frame->dts() - timeStamp_last;
if (diff > 0 && diff < 500) {
usleep(diff * 1000);
} else {
usleep(1 * 1000);
}
timeStamp_last = frame->dts();
}
return true;
}
bool addTrack(const Track::Ptr &track) override {
if (_muxer) {
return _muxer->addTrack(track);
}
return true;
}
void addTrackCompleted() override {
if (_muxer) {
_muxer->addTrackCompleted();
}
}
void resetTracks() override {
}
virtual void flush() override {}
private:
MultiMediaSourceMuxer::Ptr _muxer;
uint64_t timeStamp = 0;
uint64_t timeStamp_last = 0;
};
static bool loadFile(const char *path, const EventPoller::Ptr &poller) {
FILE *fp = fopen(path, "rb");
if (!fp) {
WarnL << "open eqq failed:" << path;
return false;
}
fseek(fp, 0, SEEK_END);
long lSize = ftell(fp);
uint8_t *text = (uint8_t *)malloc(lSize);
rewind(fp);
fread(text, sizeof(char), lSize, fp);
PsProcess::Ptr ps_process = std::make_shared<PsProcess>();
DecoderImp::Ptr ps_decoder = DecoderImp::createDecoder(DecoderImp::decoder_ps, ps_process.get());
if (ps_decoder) {
ps_decoder->input(text, lSize);
}
WarnL << lSize / 1024 << "KB";
fclose(fp);
return true;
}
int main(int argc, char *argv[]) {
// Set up the logger
Logger::Instance().add(std::make_shared<ConsoleChannel>("ConsoleChannel"));
// Start the asynchronous log writer thread
Logger::Instance().setWriter(std::make_shared<AsyncLogWriter>());
loadIniConfig((exeDir() + "config.ini").data());
TcpServer::Ptr rtspSrv(new TcpServer());
TcpServer::Ptr rtmpSrv(new TcpServer());
TcpServer::Ptr httpSrv(new TcpServer());
rtspSrv->start<RtspSession>(554); // default 554
rtmpSrv->start<RtmpSession>(1935); // default 1935
httpSrv->start<HttpSession>(81); // default 80
if (argc == 2) {
auto poller = EventPollerPool::Instance().getPoller();
poller->async_first([poller, argv]() {
loadFile(argv[1], poller);
sem.post();
});
sem.wait();
sleep(1);
} else
ErrorL << "parameter error.";
return 0;
}

tests/test_rtp_pcap.cpp (new file, +246)

@@ -0,0 +1,246 @@
#include "Common/config.h"
#include "Http/HttpSession.h"
#include "Network/TcpServer.h"
#include "Rtmp/RtmpSession.h"
#include "Rtp/RtpProcess.h"
#include "Rtsp/RtspSession.h"
#include "Util/logger.h"
#include "Util/util.h"
#include <iostream>
#include <map>
#include <pcap.h>
using namespace std;
using namespace toolkit;
using namespace mediakit;
/* Ethernet frame header */
struct sniff_ethernet {
#define ETHER_ADDR_LEN 6
u_char ether_dhost[ETHER_ADDR_LEN]; /* destination host address */
u_char ether_shost[ETHER_ADDR_LEN]; /* source host address */
u_short ether_unused;
u_short ether_type; /* IP: 0x0800; IPV6: 0x86DD; ARP: 0x0806; RARP: 0x8035 */
};
#define ETHERTYPE_IPV4 (0x0800)
#define ETHERTYPE_IPV6 (0x86DD)
#define ETHERTYPE_ARP (0x0806)
#define ETHERTYPE_RARP (0x8035)
/* IP packet header */
struct sniff_ip {
#if BYTE_ORDER == LITTLE_ENDIAN
u_int ip_hl : 4, /* header length */
ip_v : 4; /* version */
#if BYTE_ORDER == BIG_ENDIAN
u_int ip_v : 4, /* version */
ip_hl : 4; /* header length */
#endif
#endif /* not _IP_VHL */
u_char ip_tos; /* type of service */
u_short ip_len; /* total length */
u_short ip_id; /* identification */
u_char ip_flag;
u_char ip_off; /* fragment offset */
#define IP_RF 0x8000 /* reserved fragment flag */
#define IP_DF 0x4000 /* dont fragment flag */
#define IP_MF 0x2000 /* more fragments flag */
#define IP_OFFMASK 0x1fff /* fragment offset mask */
u_char ip_ttl; /* time to live */
u_char ip_p; /* protocol: 1 ICMP; 2 IGMP; 4 IP; 6 TCP; 17 UDP; 89 OSPF */
u_short ip_sum; /* checksum */
struct in_addr ip_src, ip_dst; /* source and destination addresses */
};
#define IPTYPE_ICMP (1)
#define IPTYPE_IGMP (2)
#define IPTYPE_IP (4)
#define IPTYPE_TCP (6)
#define IPTYPE_UDP (17)
#define IPTYPE_OSPF (89)
typedef u_int tcp_seq;
/* TCP packet header */
struct sniff_tcp {
u_short th_sport; /* source port */
u_short th_dport; /* destination port */
tcp_seq th_seq; /* sequence number */
tcp_seq th_ack; /* acknowledgement number */
#if BYTE_ORDER == LITTLE_ENDIAN
u_int th_x2 : 4, /* unused */
th_off : 4; /* data offset */
#endif
#if BYTE_ORDER == BIG_ENDIAN
u_int th_off : 4, /* data offset */
th_x2 : 4; /* unused */
#endif
u_char th_flags;
#define TH_FIN 0x01
#define TH_SYN 0x02
#define TH_RST 0x04
#define TH_PUSH 0x08
#define TH_ACK 0x10
#define TH_URG 0x20
#define TH_ECE 0x40
#define TH_CWR 0x80
#define TH_FLAGS (TH_FIN | TH_SYN | TH_RST | TH_ACK | TH_URG | TH_ECE | TH_CWR)
u_short th_win; /* TCP sliding window */
u_short th_sum; /* header checksum */
u_short th_urp; /* urgent pointer */
};
/* UDP header */
struct sniff_udp {
uint16_t sport; /* source port */
uint16_t dport; /* destination port */
uint16_t udp_length;
uint16_t udp_sum; /* checksum */
};
struct rtp_stream {
uint64_t stamp = 0;
uint64_t stamp_last = 0;
std::shared_ptr<RtpProcess> rtp_process;
Socket::Ptr sock;
struct sockaddr_storage addr;
};
static semaphore sem;
unordered_map<uint32_t, rtp_stream> rtp_streams_map;
#if defined(ENABLE_RTPPROXY)
void processRtp(uint32_t stream_id, const char *rtp, int &size, bool is_udp, const EventPoller::Ptr &poller) {
rtp_stream &stream = rtp_streams_map[stream_id];
if (!stream.rtp_process) {
auto process = RtpProcess::createProcess(MediaTuple{DEFAULT_VHOST, kRtpAppName, to_string(stream_id), ""});
stream.rtp_process = process;
struct sockaddr_storage addr;
memset(&addr, 0, sizeof(addr));
addr.ss_family = AF_INET;
auto sock = Socket::createSocket(poller);
stream.sock = sock;
stream.addr = addr;
}
try {
stream.rtp_process->inputRtp(is_udp, stream.sock, rtp, size, (struct sockaddr *)&stream.addr, &stream.stamp);
} catch (std::exception &ex) {
WarnL << "Input rtp failed: " << ex.what();
return ;
}
auto diff = static_cast<int64_t>(stream.stamp - stream.stamp_last);
if (diff > 0 && diff < 500) {
usleep(diff * 1000);
} else {
usleep(1 * 1000);
}
stream.stamp_last = stream.stamp;
rtp = nullptr;
size = 0;
}
#endif // #if defined(ENABLE_RTPPROXY)
static bool loadFile(const char *path, const EventPoller::Ptr &poller) {
char errbuf[PCAP_ERRBUF_SIZE] = {'\0'};
std::shared_ptr<pcap_t> handle(pcap_open_offline(path, errbuf), [](pcap_t *handle) {
sem.post();
if (handle) {
pcap_close(handle);
}
});
if (!handle) {
WarnL << "open file failed:" << path << "error: " << errbuf;
return false;
}
auto total_size = std::make_shared<size_t>(0);
struct pcap_pkthdr header = {0};
while (true) {
const u_char *pkt_buff = pcap_next(handle.get(), &header);
if (!pkt_buff) {
PrintE("pcapng read over.");
break;
}
struct sniff_ethernet *ethernet = (struct sniff_ethernet *)pkt_buff;
int eth_len = sizeof(struct sniff_ethernet); // length of the Ethernet header
int ip_len = sizeof(struct sniff_ip); // length of the IP header
int tcp_len = sizeof(struct sniff_tcp); // length of the TCP header
int udp_headr_len = sizeof(struct sniff_udp); // length of the UDP header
/* Parse the network layer: IP header */
if (ntohs(ethernet->ether_type) == ETHERTYPE_IPV4) { // IPV4
struct sniff_ip *ip = (struct sniff_ip *)(pkt_buff + eth_len);
ip_len = (ip->ip_hl & 0x0f) * 4; // actual IP header length
unsigned char *saddr = (unsigned char *)&ip->ip_src.s_addr; // network byte order to host byte order
unsigned char *daddr = (unsigned char *)&ip->ip_dst.s_addr;
/* Parse the transport layer: TCP, UDP, ICMP */
if (ip->ip_p == IPTYPE_TCP) { // TCP
PrintI("ip->proto:TCP "); // which transport-layer protocol is used
struct sniff_tcp *tcp = (struct sniff_tcp *)(pkt_buff + eth_len + ip_len);
PrintI("tcp_sport = %u ", tcp->th_sport);
PrintI("tcp_dport = %u ", tcp->th_dport);
for (int i = 0; *(pkt_buff + eth_len + ip_len + tcp_len + i) != '\0'; i++) {
PrintI("%02x ", *(pkt_buff + eth_len + ip_len + tcp_len + i));
}
} else if (ip->ip_p == IPTYPE_UDP) { // UDP
// PrintI("ip->proto:UDP "); // which transport-layer protocol is used
struct sniff_udp *udp = (struct sniff_udp *)(pkt_buff + eth_len + ip_len);
auto udp_pack_len = ntohs(udp->udp_length);
uint32_t src_ip = ntohl(ip->ip_src.s_addr);
uint32_t dst_ip = ntohl(ip->ip_dst.s_addr);
uint16_t src_port = ntohs(udp->sport);
uint16_t dst_port = ntohs(udp->dport);
uint32_t stream_id = (src_ip << 16) + src_port + (dst_ip << 4) + dst_port;
const char *rtp = reinterpret_cast<const char *>(pkt_buff + eth_len + ip_len + udp_headr_len);
auto rtp_len = udp_pack_len - udp_headr_len;
#if defined(ENABLE_RTPPROXY)
processRtp(stream_id, rtp, rtp_len, true, poller);
#endif // #if defined(ENABLE_RTPPROXY)
} else if (ip->ip_p == IPTYPE_ICMP) { // ICMP
PrintI("ip->proto:CCMP "); // 传输层用的哪一个协议
} else {
PrintI("未识别的传输层协议");
}
} else if (ntohs(ethernet->ether_type) == ETHERTYPE_IPV6) { // IPV6
PrintI("It's IPv6! ");
} else {
PrintI("既不是IPV4也不是IPV6 ");
}
}
return true;
}
int main(int argc, char *argv[]) {
// Set up the logger
Logger::Instance().add(std::make_shared<ConsoleChannel>("ConsoleChannel"));
// Start the asynchronous log writer thread
Logger::Instance().setWriter(std::make_shared<AsyncLogWriter>());
loadIniConfig((exeDir() + "config.ini").data());
TcpServer::Ptr rtspSrv(new TcpServer());
TcpServer::Ptr rtmpSrv(new TcpServer());
TcpServer::Ptr httpSrv(new TcpServer());
rtspSrv->start<RtspSession>(554); // default 554
rtmpSrv->start<RtmpSession>(1935); // default 1935
httpSrv->start<HttpSession>(81); // default 80
if (argc == 2) {
auto poller = EventPollerPool::Instance().getPoller();
poller->async_first([poller, argv]() {
loadFile(argv[1], poller);
sem.post();
});
sem.wait();
sleep(1);
} else {
ErrorL << "parameter error.";
}
return 0;
}
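As a cross-check on the offset arithmetic in loadFile() above, the sketch below walks the same Ethernet/IPv4/UDP layering in isolation; the UDP length value is an assumed example, not data from a real capture:

#include <cstdint>
#include <cstdio>

int main() {
    const size_t eth_len = 14;              // Ethernet II header
    const size_t ip_len = 5 * 4;            // IPv4 header without options (IHL = 5 words)
    const size_t udp_header_len = 8;        // fixed UDP header size
    const uint16_t udp_length_field = 1208; // example udp->udp_length (header + payload)

    // The RTP data starts after all three headers; the UDP length field
    // includes its own 8-byte header, so subtract it to get the payload size.
    size_t rtp_offset = eth_len + ip_len + udp_header_len;
    size_t rtp_len = udp_length_field - udp_header_len;

    std::printf("RTP payload starts at byte %zu, length %zu\n", rtp_offset, rtp_len);
    return 0;
}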


@@ -15,8 +15,8 @@ var ZLMRTCClient = (function (exports) {
     CAPTURE_STREAM_FAILED: 'CAPTURE_STREAM_FAILED'
   };

-  const VERSION$1 = '1.1.0';
-  const BUILD_DATE = 'Thu Jun 20 2024 16:15:41 GMT+0800 (China Standard Time)';
+  const VERSION$1 = '1.1.1';
+  const BUILD_DATE = 'Tue Nov 19 2024 20:10:15 GMT+0800 (China Standard Time)';

   // Copyright (C) <2018> Intel Corporation
   //

@@ -9090,6 +9090,12 @@ var ZLMRTCClient = (function (exports) {
       if (this.options.useCamera) {
         if (this.options.videoEnable) videoConstraints = new VideoTrackConstraints(VideoSourceInfo.CAMERA);
         if (this.options.audioEnable) audioConstraints = new AudioTrackConstraints(AudioSourceInfo.MIC);
+        if (typeof videoConstraints == 'object' && this.options.videoId != '') {
+          videoConstraints.deviceId = this.options.videoId;
+        }
+        if (typeof audioConstraints == 'object' && this.options.audioId != '') {
+          audioConstraints.deviceId = this.options.audioId;
+        }
       } else {
         if (this.options.videoEnable) {
           videoConstraints = new VideoTrackConstraints(VideoSourceInfo.SCREENCAST);

@@ -9099,17 +9105,14 @@ var ZLMRTCClient = (function (exports) {
             // error shared display media not only audio
             error(this.TAG, 'error paramter');
           }
+          if (typeof audioConstraints == 'object' && this.options.audioId != '') {
+            audioConstraints.deviceId = this.options.audioId;
+          }
         }
       }

       if (this.options.resolution.w != 0 && this.options.resolution.h != 0 && typeof videoConstraints == 'object') {
         videoConstraints.resolution = new Resolution(this.options.resolution.w, this.options.resolution.h);
       }
-      if (typeof videoConstraints == 'object' && this.options.videoId != '') {
-        videoConstraints.deviceId = this.options.videoId;
-      }
-      if (typeof audioConstraints == 'object' && this.options.audioId != '') {
-        audioConstraints.deviceId = this.options.audioId;
-      }
       MediaStreamFactory.createMediaStream(new StreamConstraints(audioConstraints, videoConstraints)).then(stream => {
         this._localStream = stream;
         this.dispatch(Events$1.WEBRTC_ON_LOCAL_STREAM, stream);

File diff suppressed because one or more lines are too long


@@ -15,8 +15,8 @@ var ZLMRTCClient = (function (exports) {
     CAPTURE_STREAM_FAILED: 'CAPTURE_STREAM_FAILED'
   };

-  const VERSION$1 = '1.1.0';
-  const BUILD_DATE = 'Thu Jun 20 2024 16:15:41 GMT+0800 (China Standard Time)';
+  const VERSION$1 = '1.1.1';
+  const BUILD_DATE = 'Tue Nov 19 2024 20:10:15 GMT+0800 (China Standard Time)';

   // Copyright (C) <2018> Intel Corporation
   //

@@ -9090,6 +9090,12 @@ var ZLMRTCClient = (function (exports) {
       if (this.options.useCamera) {
         if (this.options.videoEnable) videoConstraints = new VideoTrackConstraints(VideoSourceInfo.CAMERA);
         if (this.options.audioEnable) audioConstraints = new AudioTrackConstraints(AudioSourceInfo.MIC);
+        if (typeof videoConstraints == 'object' && this.options.videoId != '') {
+          videoConstraints.deviceId = this.options.videoId;
+        }
+        if (typeof audioConstraints == 'object' && this.options.audioId != '') {
+          audioConstraints.deviceId = this.options.audioId;
+        }
       } else {
         if (this.options.videoEnable) {
           videoConstraints = new VideoTrackConstraints(VideoSourceInfo.SCREENCAST);

@@ -9099,17 +9105,14 @@ var ZLMRTCClient = (function (exports) {
             // error shared display media not only audio
             error(this.TAG, 'error paramter');
           }
+          if (typeof audioConstraints == 'object' && this.options.audioId != '') {
+            audioConstraints.deviceId = this.options.audioId;
+          }
         }
       }

       if (this.options.resolution.w != 0 && this.options.resolution.h != 0 && typeof videoConstraints == 'object') {
         videoConstraints.resolution = new Resolution(this.options.resolution.w, this.options.resolution.h);
       }
-      if (typeof videoConstraints == 'object' && this.options.videoId != '') {
-        videoConstraints.deviceId = this.options.videoId;
-      }
-      if (typeof audioConstraints == 'object' && this.options.audioId != '') {
-        audioConstraints.deviceId = this.options.audioId;
-      }
       MediaStreamFactory.createMediaStream(new StreamConstraints(audioConstraints, videoConstraints)).then(stream => {
         this._localStream = stream;
         this.dispatch(Events$1.WEBRTC_ON_LOCAL_STREAM, stream);

File diff suppressed because one or more lines are too long