li 2024-05-29 17:56:58 +08:00
parent 469a9af166
commit 8b83220ccd
16 changed files with 991 additions and 153 deletions

View File: AndroidManifest.xml

@@ -47,6 +47,8 @@
android:screenOrientation="portrait"/>
<activity android:name=".PlayerDemoActivity"
android:screenOrientation="portrait"/>
<activity android:name=".PusherDemoActivity"
android:screenOrientation="portrait"/>
</application>
</manifest>

View File: MainActivity.kt

@@ -19,6 +19,7 @@ class MainActivity : AppCompatActivity() {
}
fun toPushActivity(view: View) {
startActivity(Intent(this, PusherDemoActivity::class.java))
}

View File: PlayerDemoActivity.kt

@@ -1,30 +1,70 @@
package com.zlmediakit.webrtc
import android.os.Bundle
import android.os.Handler
import android.view.View
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import com.zlm.rtc.ZLMRTCPlayer
import com.zlm.rtc.play.ZLMRTCPlayerImpl
import kotlinx.android.synthetic.main.activity_player.surface_view_renderer
import kotlinx.android.synthetic.main.activity_player.tv_app
import kotlinx.android.synthetic.main.activity_player.tv_stream_id
class PlayerDemoActivity:AppCompatActivity() {
class PlayerDemoActivity : AppCompatActivity() {
private val player: ZLMRTCPlayer by lazy {
ZLMRTCPlayerImpl(this)
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_player)
ZLMRTCPlayer.shareInstance().bind(applicationContext,surface_view_renderer,true)
//ffmpeg -re -stream_loop -1 -i "D:\li\hot\data\data\baseline.mp4" -vcodec h264 -acodec aac -f rtsp -rtsp_transport tcp -bf 0 rtsp://zlmediakit.com/live/li
//ffmpeg -re -stream_loop -1 -i "D:\li\hot\data\data\test.mp4" -vcodec h264 -acodec aac -f flv -bf 0 rtmp://zlmediakit.com/live/li
Handler().postDelayed({
ZLMRTCPlayer.shareInstance().play("live","li")
},1000)
player.bind(surface_view_renderer, false)
}
override fun onDestroy() {
super.onDestroy()
ZLMRTCPlayer.shareInstance().destroy()
player.stop()
}
fun onPlayClick(view: View) {
player.play(tv_app.text.toString(), tv_stream_id.text.toString())
}
fun onPauseClick(view: View) {
player.pause()
}
fun onStopClick(view: View) {
player.stop()
}
fun onResumeClick(view: View) {
player.resume()
}
fun onCapture(view: View) {
player.capture {
Toast.makeText(this, "capture ok", Toast.LENGTH_SHORT).show()
}
}
fun onRecord(view: View) {
player.record(10 * 1000) {
Toast.makeText(this, "" + it, Toast.LENGTH_SHORT).show()
}
}
fun onVolume(view: View) {
player.setSpeakerphoneOn(true)
}
}

View File: PushDemoActivity.kt

@@ -1,12 +0,0 @@
package com.zlmediakit.webrtc
import android.os.Bundle
import androidx.appcompat.app.AppCompatActivity
class PushDemoActivity: AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
}
}

View File: PusherDemoActivity.kt

@@ -0,0 +1,38 @@
package com.zlmediakit.webrtc
import android.os.Bundle
import android.view.View
import androidx.appcompat.app.AppCompatActivity
import com.zlm.rtc.ZLMRTCPusher
import com.zlm.rtc.push.ZLMRTCPusherImpl
import kotlinx.android.synthetic.main.activity_pusher.tv_app
import kotlinx.android.synthetic.main.activity_pusher.tv_stream_id
class PusherDemoActivity : AppCompatActivity() {
private val pusher: ZLMRTCPusher by lazy {
ZLMRTCPusherImpl(this)
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_pusher)
}
fun onPushCamera(view: View) {
pusher.push(tv_app.text.toString(), tv_stream_id.text.toString())
}
fun onPushScreen(view: View) {
// referenced by activity_pusher.xml but not implemented in this commit
}
fun onPushFile(view: View) {
// referenced by activity_pusher.xml but not implemented in this commit
}
override fun onDestroy() {
super.onDestroy()
pusher.stop()
}
}
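Note: camera push needs the CAMERA and RECORD_AUDIO runtime permissions, which this demo does not request. A minimal sketch of a guard a caller could add before pusher.push() (the helper name and request code are hypothetical; assumes androidx.core's ActivityCompat/ContextCompat):

    // Hypothetical pre-flight permission check, not part of this commit.
    private fun ensurePushPermissions(): Boolean {
        val missing = arrayOf(Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO)
            .filter { ContextCompat.checkSelfPermission(this, it) != PackageManager.PERMISSION_GRANTED }
        if (missing.isEmpty()) return true
        ActivityCompat.requestPermissions(this, missing.toTypedArray(), 1)
        return false
    }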

View File: activity_player.xml

@@ -1,13 +1,128 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
xmlns:app="http://schemas.android.com/apk/res-auto">
android:layout_height="match_parent">
<org.webrtc.SurfaceViewRenderer
android:id="@+id/surface_view_renderer"
android:layout_width="wrap_content"
android:layout_height="240dp"/>
android:layout_height="240dp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_below="@id/surface_view_renderer"
android:orientation="vertical">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content">
<androidx.appcompat.widget.AppCompatTextView
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_weight="1"
android:gravity="end"
android:text="app: " />
<androidx.appcompat.widget.AppCompatEditText
android:id="@+id/tv_app"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="4"
android:text="live"
android:gravity="center"/>
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_below="@id/surface_view_renderer">
<androidx.appcompat.widget.AppCompatTextView
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_weight="1"
android:gravity="end"
android:text="streamId:" />
<androidx.appcompat.widget.AppCompatEditText
android:id="@+id/tv_stream_id"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="4"
android:text="li"
android:gravity="center"/>
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content">
<androidx.appcompat.widget.AppCompatButton
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="play"
android:onClick="onPlayClick"
android:textAllCaps="false" />
<androidx.appcompat.widget.AppCompatButton
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="pause"
android:onClick="onPauseClick"
android:textAllCaps="false" />
<androidx.appcompat.widget.AppCompatButton
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="resume"
android:onClick="onResumeClick"
android:textAllCaps="false" />
<androidx.appcompat.widget.AppCompatButton
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="stop"
android:onClick="onStopClick"
android:textAllCaps="false" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content">
<androidx.appcompat.widget.AppCompatButton
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="volume"
android:onClick="onVolume"
android:textAllCaps="false" />
<androidx.appcompat.widget.AppCompatButton
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="capture"
android:onClick="onCapture"
android:textAllCaps="false" />
<androidx.appcompat.widget.AppCompatButton
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="record"
android:onClick="onRecord"
android:textAllCaps="false" />
</LinearLayout>
</LinearLayout>
</RelativeLayout>

View File: activity_pusher.xml

@@ -0,0 +1,86 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
xmlns:app="http://schemas.android.com/apk/res-auto">
<org.webrtc.SurfaceViewRenderer
android:id="@+id/surface_view_renderer"
android:layout_width="wrap_content"
android:layout_height="240dp" />
<androidx.appcompat.widget.LinearLayoutCompat
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical"
app:layout_constraintTop_toBottomOf="@id/surface_view_renderer"
>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content">
<androidx.appcompat.widget.AppCompatTextView
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_weight="1"
android:gravity="end"
android:text="app: " />
<androidx.appcompat.widget.AppCompatEditText
android:id="@+id/tv_app"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="4"
android:text="live"
android:gravity="center"/>
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_below="@id/surface_view_renderer">
<androidx.appcompat.widget.AppCompatTextView
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_weight="1"
android:gravity="end"
android:text="streamId:" />
<androidx.appcompat.widget.AppCompatEditText
android:id="@+id/tv_stream_id"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="4"
android:text="li"
android:gravity="center"/>
</LinearLayout>
<androidx.appcompat.widget.AppCompatButton
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Push Camera"
android:textAllCaps="false"
android:onClick="onPushCamera"/>
<androidx.appcompat.widget.AppCompatButton
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Push Screen"
android:textAllCaps="false"
android:onClick="onPushScreen"/>
<androidx.appcompat.widget.AppCompatButton
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Push File"
android:textAllCaps="false"
android:onClick="onPushFile"/>
</androidx.appcompat.widget.LinearLayoutCompat>
</androidx.constraintlayout.widget.ConstraintLayout>

View File

@@ -64,10 +64,21 @@ Java_com_zlm_rtc_NativeLib_exchangeSessionDescription(JNIEnv *env, jobject thiz,
}
extern "C"
JNIEXPORT jstring JNICALL
Java_com_zlm_rtc_NativeLib_makeUrl(JNIEnv *env, jobject thiz, jstring app, jstring stream_id) {
Java_com_zlm_rtc_NativeLib_makePlayUrl(JNIEnv *env, jobject thiz, jstring app, jstring stream_id) {
const char *appString = env->GetStringUTFChars(app, nullptr);
const char *streamIdString = env->GetStringUTFChars(stream_id, nullptr);
char url[256];
snprintf(url, sizeof(url), "https://zlmediakit.com/index/api/webrtc?app=%s&stream=%s&type=play", appString, streamIdString);
env->ReleaseStringUTFChars(app, appString);
env->ReleaseStringUTFChars(stream_id, streamIdString);
return env->NewStringUTF(url);
}
extern "C"
JNIEXPORT jstring JNICALL
Java_com_zlm_rtc_NativeLib_makePushUrl(JNIEnv *env, jobject thiz, jstring app, jstring stream_id) {
const char *appString = env->GetStringUTFChars(app, nullptr);
const char *streamIdString = env->GetStringUTFChars(stream_id, nullptr);
char url[256];
snprintf(url, sizeof(url), "https://zlmediakit.com/index/api/webrtc?app=%s&stream=%s&type=push", appString, streamIdString);
env->ReleaseStringUTFChars(app, appString);
env->ReleaseStringUTFChars(stream_id, streamIdString);
return env->NewStringUTF(url);
}
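For reference, the Kotlin side consumes these URLs by POSTing the local offer SDP and applying the returned answer; ZLMediaKit replies with JSON of the form {"code":0,"sdp":"..."}. A sketch (the form-encoded "sdp" field mirrors the HttpClient.doPost call in ZLMRTCPlayerImpl below; the function itself and its error handling are assumptions):

    // Sketch: exchange an SDP offer for an answer via the generated URL.
    // Assumed imports: java.net.URL, java.net.HttpURLConnection, java.net.URLEncoder, org.json.JSONObject
    fun exchangeSdp(url: String, offerSdp: String): String? {
        val conn = URL(url).openConnection() as HttpURLConnection
        conn.requestMethod = "POST"
        conn.doOutput = true
        conn.outputStream.use { it.write("sdp=${URLEncoder.encode(offerSdp, "UTF-8")}".toByteArray()) }
        val body = conn.inputStream.bufferedReader().readText()
        val json = JSONObject(body)
        return if (json.getInt("code") == 0) json.getString("sdp") else null // answer SDP, or null on error
    }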

View File: NativeLib.kt

@@ -10,8 +10,9 @@ class NativeLib {
external fun exchangeSessionDescription(description:String): String
external fun makeUrl(app:String,streamId:String): String
external fun makePlayUrl(app:String,streamId:String): String
external fun makePushUrl(app:String,streamId:String): String
companion object {
// Used to load the 'rtc' library on application startup.
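The truncated companion object presumably follows the standard JNI loading pattern, per its own comment (a sketch, assuming the library name "rtc" matching the Java_com_zlm_rtc_* symbol prefix above):

    companion object {
        init {
            System.loadLibrary("rtc")
        }
    }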

View File: ZLMRTCPlayer.kt

@ -7,11 +7,11 @@ import org.webrtc.SurfaceViewRenderer
abstract class ZLMRTCPlayer {
companion object {
fun shareInstance(): ZLMRTCPlayer {
return ZLMRTCPlayerImpl()
}
}
// companion object {
// fun shareInstance(): ZLMRTCPlayer {
// return ZLMRTCPlayerImpl(this)
// }
// }
@@ -19,7 +19,7 @@ abstract class ZLMRTCPlayer {
constructor()
public abstract fun bind(context: Context,surface: SurfaceViewRenderer, localPreview:Boolean)
public abstract fun bind(surface: SurfaceViewRenderer, localPreview:Boolean)
// Pull-stream (playback) interface
@@ -35,14 +35,12 @@
public abstract fun pause()
public abstract fun destroy()
public abstract fun resume()
public abstract fun capture(listener: (bitmap: Bitmap) -> Unit)
public abstract fun record(record_duration: Long, result: (path: String) -> Unit)
public abstract fun record(duration: Long, result: (path: String) -> Unit)
// Push-stream (publish) interface

View File: ZLMRTCPush.kt

@@ -1,45 +0,0 @@
package com.zlm.rtc
import android.graphics.Bitmap
abstract class ZLMRTCPush {
constructor()
public abstract fun init(serverUrl: String)
// Pull-stream (playback) interface
public abstract fun play(app: String, streamId: String)
public abstract fun setSpeakerphoneOn(on: Boolean)
public abstract fun setLocalMute(on: Boolean)
public abstract fun stop()
public abstract fun pause()
public abstract fun resume()
public abstract fun capture(listener: (bitmap: Bitmap) -> Unit)
public abstract fun record(record_duration: Long, result: (path: String) -> Unit)
// Push-stream (publish) interface
// public abstract fun startLocalPreview()
//
// public abstract fun stopLocalPreview()
//
// public abstract fun startPublishing()
//
// public abstract fun stopPublishing()
//
}

View File: ZLMRTCPusher.kt

@@ -0,0 +1,10 @@
package com.zlm.rtc
import android.graphics.Bitmap
abstract class ZLMRTCPusher {
abstract fun push(app: String, streamId: String)
abstract fun stop()
}

View File: PeerConnectionClient.java

@@ -190,8 +190,10 @@ public class PeerConnectionClient {
private RtcEventLog rtcEventLog;
// Implements the WebRtcAudioRecordSamplesReadyCallback interface and writes
// recorded audio samples to an output file.
@Nullable
private RecordedAudioToFileController saveRecordedAudioToFile = null;
// @Nullable
// private RecordedAudioToFileController saveRecordedAudioToFile = null;
private VideoFileRecorder saveVideoFileRecorder = null;
/**
@@ -489,7 +491,7 @@
WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(false);
}
WebRtcAudioRecord.setOnAudioSamplesReady(saveRecordedAudioToFile);
WebRtcAudioRecord.setOnAudioSamplesReady(saveVideoFileRecorder);
// Set audio record error callbacks.
WebRtcAudioRecord.setErrorCallback(new WebRtcAudioRecordErrorCallback() {
@@ -589,7 +591,7 @@
};
return JavaAudioDeviceModule.builder(appContext)
.setSamplesReadyCallback(saveRecordedAudioToFile)
.setSamplesReadyCallback(saveVideoFileRecorder)
.setUseHardwareAcousticEchoCanceler(!peerConnectionParameters.disableBuiltInAEC)
.setUseHardwareNoiseSuppressor(!peerConnectionParameters.disableBuiltInNS)
.setAudioRecordErrorCallback(audioRecordErrorCallback)
@@ -688,10 +690,9 @@
}
}
if (saveRecordedAudioToFile != null) {
if (saveRecordedAudioToFile.start()) {
Log.d(TAG, "Recording input audio to file is activated");
}
if (saveVideoFileRecorder == null) {
saveVideoFileRecorder = new VideoFileRecorder();
}
Log.d(TAG, "Peer connection created.");
@@ -872,6 +873,23 @@
});
}
public void setRecordEnable(final boolean enable, String savePath) {
executor.execute(() -> {
if (saveVideoFileRecorder != null) {
if (enable) {
try {
saveVideoFileRecorder.start(savePath, rootEglBase.getEglBaseContext(), false);
} catch (IOException e) {
Log.e(TAG, "failed to start VideoFileRecorder", e);
}
} else {
saveVideoFileRecorder.release();
}
}
});
}
public void createOffer(final BigInteger handleId) {
Log.d(TAG, "peerConnectionMap get handleId=" + peerConnectionMap.size());
executor.execute(() -> {
@@ -1362,7 +1380,11 @@
remoteVideoTrack.setEnabled(true);
connection.videoTrack = remoteVideoTrack;
connection.videoTrack.addSink(videoSinkMap.get(connection.handleId));
if (saveVideoFileRecorder != null) {
connection.videoTrack.addSink(saveVideoFileRecorder);
}
events.onRemoteRender(connection.handleId);
}
}
});
@@ -1453,10 +1475,6 @@
if (peerConnection != null && !isError) {
Log.d(TAG, "Set local SDP from " + sdp.type);
peerConnection.setLocalDescription(sdpObserver, sdp);
// MediaStream localMediaStream = factory.createLocalMediaStream("ARDAMS");
// localMediaStream.addTrack(localAudioTrack);
// peerConnection.addStream(localMediaStream);
}
});
}
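Taken together, the new recording path is: setRecordEnable(true, path) starts the VideoFileRecorder (created when the peer connection is built), each decoded remote frame is mirrored into it via addSink, microphone samples arrive through setSamplesReadyCallback, and setRecordEnable(false, path) releases it, finalizing the MP4. A caller-side usage sketch (duration handling as in ZLMRTCPlayerImpl.record below):

    // Sketch: record roughly 10 seconds of the remote stream to the app cache dir.
    val path = context.cacheDir.absolutePath + "/" + System.currentTimeMillis() + ".mp4"
    peerConnectionClient?.setRecordEnable(true, path)
    Handler(Looper.getMainLooper()).postDelayed({
        peerConnectionClient?.setRecordEnable(false, path) // stops the codecs, finalizes the MP4
    }, 10_000)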

View File: VideoFileRecorder.java

@@ -0,0 +1,348 @@
package com.zlm.rtc.client;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import org.webrtc.EglBase;
import org.webrtc.GlRectDrawer;
import org.webrtc.VideoFrame;
import org.webrtc.VideoFrameDrawer;
import org.webrtc.VideoSink;
import org.webrtc.audio.JavaAudioDeviceModule;
import org.webrtc.voiceengine.WebRtcAudioRecord;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* @author leo
* @version 1.0
* @className VideoFileRecorder
* @description Records remote video frames (and optionally microphone audio) to an MP4 file via MediaCodec and MediaMuxer.
* @date 2022/9/27 11:12
**/
class VideoFileRecorder implements VideoSink, JavaAudioDeviceModule.SamplesReadyCallback, WebRtcAudioRecord.WebRtcAudioRecordSamplesReadyCallback {
private static final String TAG = "VideoFileRecorder";
private String mOutFilePath;
private HandlerThread renderThread;
private Handler renderThreadHandler;
private HandlerThread audioThread;
private Handler audioThreadHandler;
private int outputFileWidth = -1;
private int outputFileHeight = -1;
private ByteBuffer[] encoderOutputBuffers;
private ByteBuffer[] audioInputBuffers;
private ByteBuffer[] audioOutputBuffers;
private EglBase eglBase;
private EglBase.Context sharedContext;
private VideoFrameDrawer frameDrawer;
// TODO: these ought to be configurable as well
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 15; // 15fps
private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
private MediaMuxer mediaMuxer;
private MediaCodec encoder;
private MediaCodec.BufferInfo bufferInfo, audioBufferInfo;
private int trackIndex = -1;
private int audioTrackIndex;
private boolean withAudio = false;
private boolean isEnableRecord = false;
private GlRectDrawer drawer;
private Surface surface;
private MediaCodec audioEncoder;
VideoFileRecorder() {
Log.i(TAG, "=====================>VideoFileRecorder");
renderThread = new HandlerThread(TAG + "RenderThread");
renderThread.start();
renderThreadHandler = new Handler(renderThread.getLooper());
isEnableRecord = false;
}
public void start(String outputFile, final EglBase.Context sharedContext, boolean withAudio) throws IOException {
Log.i(TAG, "=====================>start");
isEnableRecord = true;
trackIndex = -1;
outputFileWidth = -1;
this.sharedContext = sharedContext;
this.withAudio = withAudio;
if (this.withAudio) {
audioThread = new HandlerThread(TAG + "AudioThread");
audioThread.start();
audioThreadHandler = new Handler(audioThread.getLooper());
} else {
audioThread = null;
audioThreadHandler = null;
}
bufferInfo = new MediaCodec.BufferInfo();
this.mOutFilePath = outputFile;
mediaMuxer = new MediaMuxer(outputFile,
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
audioTrackIndex = this.withAudio ? -1 : 0;
}
/**
* Release all resources. All already posted frames will be rendered first.
*/
public void release() {
isEnableRecord = false;
if (audioThreadHandler != null) {
audioThreadHandler.post(() -> {
if (audioEncoder != null) {
audioEncoder.stop();
audioEncoder.release();
}
audioThread.quit();
});
}
if (renderThreadHandler != null) {
renderThreadHandler.post(() -> {
if (encoder != null) {
encoder.stop();
encoder.release();
}
if (eglBase != null) {
eglBase.release();
}
if (muxerStarted) {
mediaMuxer.stop(); // stop() throws if the muxer was never started
}
if (mediaMuxer != null) {
mediaMuxer.release();
}
muxerStarted = false;
renderThread.quit();
});
}
}
public boolean isRecording() {
return isEnableRecord;
}
private void initVideoEncoder() {
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, outputFileWidth, outputFileHeight);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, 6000000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
try {
encoder = MediaCodec.createEncoderByType(MIME_TYPE);
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
renderThreadHandler.post(() -> {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_RECORDABLE);
surface = encoder.createInputSurface();
eglBase.createSurface(surface);
eglBase.makeCurrent();
drawer = new GlRectDrawer();
});
} catch (Exception e) {
Log.wtf(TAG, e);
}
}
@Override
public void onFrame(VideoFrame frame) {
if (!isEnableRecord) return;
Log.e(TAG, "onFrame");
frame.retain();
if (outputFileWidth == -1) {
outputFileWidth = frame.getRotatedWidth();
outputFileHeight = frame.getRotatedHeight();
initVideoEncoder();
}
renderThreadHandler.post(() -> renderFrameOnRenderThread(frame));
}
private void renderFrameOnRenderThread(VideoFrame frame) {
if (frameDrawer == null) {
frameDrawer = new VideoFrameDrawer();
}
frameDrawer.drawFrame(frame, drawer, null, 0, 0, outputFileWidth, outputFileHeight);
frame.release();
drainEncoder();
eglBase.swapBuffers();
}
private boolean encoderStarted = false;
private volatile boolean muxerStarted = false;
private long videoFrameStart = 0;
private void drainEncoder() {
if (!encoderStarted) {
encoder.start();
encoderOutputBuffers = encoder.getOutputBuffers();
encoderStarted = true;
return;
}
while (true) {
try {
int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, 10000);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = encoder.getOutputBuffers();
Log.e(TAG, "encoder output buffers changed");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = encoder.getOutputFormat();
Log.e(TAG, "encoder output format changed: " + newFormat);
trackIndex = mediaMuxer.addTrack(newFormat);
if (audioTrackIndex != -1 && !muxerStarted) {
mediaMuxer.start();
Log.e(TAG, "mediaMuxer start");
muxerStarted = true;
}
if (!muxerStarted)
break;
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result fr om encoder.dequeueOutputBuffer: " + encoderStatus);
} else { // encoderStatus >= 0
try {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
break;
}
// It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
encodedData.position(bufferInfo.offset);
encodedData.limit(bufferInfo.offset + bufferInfo.size);
if (videoFrameStart == 0 && bufferInfo.presentationTimeUs != 0) {
videoFrameStart = bufferInfo.presentationTimeUs;
}
bufferInfo.presentationTimeUs -= videoFrameStart;
if (muxerStarted)
mediaMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
isEnableRecord = isEnableRecord && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
encoder.releaseOutputBuffer(encoderStatus, false);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
} catch (Exception e) {
Log.wtf(TAG, e);
break;
}
}
} catch (Exception e) {
Log.e(TAG, "encoder error, " + e);
break;
}
}
}
private long presTime = 0L;
private void drainAudio() {
if (audioBufferInfo == null)
audioBufferInfo = new MediaCodec.BufferInfo();
while (true) {
int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 10000);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
audioOutputBuffers = audioEncoder.getOutputBuffers();
Log.w(TAG, "encoder output buffers changed");
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// not expected for an encoder
MediaFormat newFormat = audioEncoder.getOutputFormat();
Log.w(TAG, "encoder output format changed: " + newFormat);
audioTrackIndex = mediaMuxer.addTrack(newFormat);
if (trackIndex != -1 && !muxerStarted) {
mediaMuxer.start();
muxerStarted = true;
}
if (!muxerStarted)
break;
} else if (encoderStatus < 0) {
Log.e(TAG, "unexpected result fr om encoder.dequeueOutputBuffer: " + encoderStatus);
} else { // encoderStatus >= 0
try {
ByteBuffer encodedData = audioOutputBuffers[encoderStatus];
if (encodedData == null) {
Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
break;
}
// It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
encodedData.position(audioBufferInfo.offset);
encodedData.limit(audioBufferInfo.offset + audioBufferInfo.size);
if (muxerStarted)
mediaMuxer.writeSampleData(audioTrackIndex, encodedData, audioBufferInfo);
isEnableRecord = isEnableRecord && (audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
audioEncoder.releaseOutputBuffer(encoderStatus, false);
if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
} catch (Exception e) {
Log.wtf(TAG, e);
break;
}
}
}
}
@Override
public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) {
if (!isEnableRecord) return;
Log.e(TAG, "onWebRtcAudioRecordSamplesReady");
if (audioThreadHandler != null) {
audioThreadHandler.post(() -> {
if (audioEncoder == null) try {
audioEncoder = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, audioSamples.getChannelCount());
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, audioSamples.getSampleRate());
format.setInteger(MediaFormat.KEY_BIT_RATE, 64 * 1024);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
audioEncoder.start();
audioInputBuffers = audioEncoder.getInputBuffers();
audioOutputBuffers = audioEncoder.getOutputBuffers();
} catch (IOException exception) {
Log.wtf(TAG, exception);
}
int bufferIndex = audioEncoder.dequeueInputBuffer(0);
if (bufferIndex >= 0) {
ByteBuffer buffer = audioInputBuffers[bufferIndex];
buffer.clear();
byte[] data = audioSamples.getData();
buffer.put(data);
audioEncoder.queueInputBuffer(bufferIndex, 0, data.length, presTime, 0);
presTime += data.length * 125 / 12; // bytes -> microseconds, assuming 48 kHz 16-bit mono: 1000000 / 48000 / 2 bytes per sample
}
drainAudio();
});
}
}
@Override
public void onWebRtcAudioRecordSamplesReady(WebRtcAudioRecord.AudioSamples samples) {
onWebRtcAudioRecordSamplesReady(new JavaAudioDeviceModule.AudioSamples(samples.getAudioFormat(),
samples.getChannelCount(), samples.getSampleRate(), samples.getData()));
}
}
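Because VideoFileRecorder implements VideoSink, it can also be driven directly from any VideoTrack, independent of PeerConnectionClient. A standalone sketch (same package assumed, since the class is package-private; eglBase, remoteVideoTrack, and context are placeholders for the caller's own instances):

    // Sketch: attach the recorder to a track, record ~10 s, then finalize.
    val recorder = VideoFileRecorder()
    recorder.start(context.cacheDir.absolutePath + "/out.mp4", eglBase.eglBaseContext, false)
    remoteVideoTrack.addSink(recorder)      // video frames now reach the encoder
    Handler(Looper.getMainLooper()).postDelayed({
        remoteVideoTrack.removeSink(recorder)
        recorder.release()                  // stops MediaCodec and MediaMuxer
    }, 10_000)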

View File: ZLMRTCPlayerImpl.kt

@@ -2,6 +2,8 @@ package com.zlm.rtc.play
import android.content.Context
import android.graphics.Bitmap
import android.media.AudioManager
import android.os.Handler
import android.util.Log
import com.zlm.rtc.NativeLib
import com.zlm.rtc.ZLMRTCPlayer
@@ -18,56 +20,36 @@ import org.webrtc.SessionDescription
import org.webrtc.StatsReport
import org.webrtc.SurfaceViewRenderer
import org.webrtc.VideoCapturer
import java.io.File
import java.math.BigInteger
import kotlin.random.Random
class ZLMRTCPlayerImpl : ZLMRTCPlayer(), PeerConnectionClient.PeerConnectionEvents {
private var context: Context? = null
class ZLMRTCPlayerImpl(val context: Context) : ZLMRTCPlayer(),
PeerConnectionClient.PeerConnectionEvents {
private var surfaceViewRenderer: SurfaceViewRenderer? = null
private val eglBase = EglBase.create()
private var eglBase: EglBase? = null
private val peerConnectionClient: PeerConnectionClient? by lazy {
private var defaultFps = 24
PeerConnectionClient(
context, eglBase,
PeerConnectionClient.PeerConnectionParameters(
false,
true,
false,
1280,
720,
15,
0,
"H264",
true,
true,
0,
"OPUS",
false,
false,
false,
false,
false,
false,
false,
false, false, false, null
), this
)
}
private var peerConnectionClient: PeerConnectionClient? = null
init {
private var localHandleId = BigInteger.valueOf(Random(1024).nextLong())
private var audioManager: AudioManager? = null
private var app: String = ""
private var streamId: String = ""
}
private fun logger(msg: String) {
Log.i("ZLMRTCPlayerImpl", msg)
}
fun createVideoCapture(context: Context?): VideoCapturer? {
private fun createVideoCapture(context: Context?): VideoCapturer? {
val videoCapturer: VideoCapturer? = if (Camera2Enumerator.isSupported(context)) {
createCameraCapture(Camera2Enumerator(context))
} else {
@@ -105,72 +87,108 @@ class ZLMRTCPlayerImpl : ZLMRTCPlayer(), PeerConnectionClient.PeerConnectionEven
return null
}
override fun bind(context: Context, surface: SurfaceViewRenderer, localPreview: Boolean) {
this.context = context
private fun initPeerConnectionClient(): PeerConnectionClient {
eglBase = EglBase.create()
return PeerConnectionClient(
context, eglBase,
PeerConnectionClient.PeerConnectionParameters(
false,
false,
false,
1280,
720,
defaultFps,
1024 * 1000 * 2,
"H264",
true,
true,
0,
"OPUS",
false,
false,
false,
false,
false,
false,
false,
false, false, false, null
), this
)
}
override fun bind(surface: SurfaceViewRenderer, localPreview: Boolean) {
this.surfaceViewRenderer = surface
this.surfaceViewRenderer?.init(eglBase.eglBaseContext,null)
this.peerConnectionClient?.setAudioEnabled(true)
peerConnectionClient?.createPeerConnectionFactory(PeerConnectionFactory.Options())
peerConnectionClient?.createPeerConnection(createVideoCapture(context), BigInteger.ONE)
peerConnectionClient?.createOffer((BigInteger.ONE))
audioManager = context.getSystemService(Context.AUDIO_SERVICE) as AudioManager
audioManager?.isSpeakerphoneOn = false
}
override fun play(app: String, streamId: String) {
this.app = app
this.streamId = streamId
if (peerConnectionClient == null) peerConnectionClient = initPeerConnectionClient()
surfaceViewRenderer?.init(eglBase?.eglBaseContext, null)
peerConnectionClient?.setAudioEnabled(true)
peerConnectionClient?.createPeerConnectionFactory(PeerConnectionFactory.Options())
peerConnectionClient?.createPeerConnection(createVideoCapture(context), localHandleId)
peerConnectionClient?.createOffer(localHandleId)
}
override fun setSpeakerphoneOn(on: Boolean) {
audioManager?.isSpeakerphoneOn = on
}
override fun setLocalMute(on: Boolean) {
audioManager?.isMicrophoneMute = on
}
override fun stop() {
surfaceViewRenderer?.clearImage()
surfaceViewRenderer?.release()
peerConnectionClient?.stopVideoSource()
peerConnectionClient?.close()
peerConnectionClient = null
}
override fun pause() {
surfaceViewRenderer?.pauseVideo()
}
override fun destroy() {
peerConnectionClient?.close()
}
override fun resume() {
surfaceViewRenderer?.setFpsReduction(defaultFps.toFloat())
}
override fun capture(listener: (bitmap: Bitmap) -> Unit) {
surfaceViewRenderer?.addFrameListener({
listener.invoke(it)
}, 1f)
}
override fun record(record_duration: Long, result: (path: String) -> Unit) {
override fun record(duration: Long, result: (path: String) -> Unit) {
val savePath = context.cacheDir.absolutePath + File.separator + System.currentTimeMillis() + ".mp4"
peerConnectionClient?.setRecordEnable(true, savePath)
Handler().postDelayed({
peerConnectionClient?.setRecordEnable(false, savePath)
result.invoke(savePath) // deliver the file path once recording stops
}, duration)
}
override fun onLocalDescription(handleId: BigInteger?, sdp: SessionDescription?) {
val url = NativeLib().makeUrl("live", "li")
logger("handleId: " + url)
val url = NativeLib().makePlayUrl(app, streamId)
logger("handleId: $url")
logger("handleId: " + sdp?.description)
val doPost = HttpClient.doPost(
url,
mutableMapOf(Pair("sdp", sdp?.description)),
mutableMapOf()
)
val result = JSONObject(doPost)
val code = result.getInt("code")
if (code == 0) {
logger("handleId: " + doPost)
logger("handleId: $doPost")
val sdp = result.getString("sdp")
peerConnectionClient?.setRemoteDescription(
handleId,
@@ -178,7 +196,7 @@ class ZLMRTCPlayerImpl : ZLMRTCPlayer(), PeerConnectionClient.PeerConnectionEven
)
} else {
val msg = result.getString("msg")
logger("handleId: " + msg)
logger("handleId: $msg")
}
}
@@ -219,12 +237,16 @@ class ZLMRTCPlayerImpl : ZLMRTCPlayer(), PeerConnectionClient.PeerConnectionEven
override fun onLocalRender(handleId: BigInteger?) {
logger("onLocalRender: " + handleId)
//peerConnectionClient?.setVideoRender(handleId, surfaceViewRenderer)
// if (handleId == localHandleId) {
// peerConnectionClient?.setVideoRender(handleId, surfaceViewRenderer)
// }
}
override fun onRemoteRender(handleId: BigInteger?) {
logger("onRemoteRender: " + handleId)
peerConnectionClient?.setVideoRender(handleId, surfaceViewRenderer)
if (handleId == localHandleId) {
peerConnectionClient?.setVideoRender(handleId, surfaceViewRenderer)
}
}

View File: ZLMRTCPusherImpl.kt

@@ -0,0 +1,205 @@
package com.zlm.rtc.push
import android.content.Context
import android.graphics.Bitmap
import android.media.AudioManager
import android.util.Log
import com.zlm.rtc.NativeLib
import com.zlm.rtc.ZLMRTCPusher
import com.zlm.rtc.client.HttpClient
import com.zlm.rtc.client.PeerConnectionClient
import org.json.JSONObject
import org.webrtc.Camera1Enumerator
import org.webrtc.Camera2Enumerator
import org.webrtc.CameraEnumerator
import org.webrtc.EglBase
import org.webrtc.IceCandidate
import org.webrtc.PeerConnectionFactory
import org.webrtc.SessionDescription
import org.webrtc.StatsReport
import org.webrtc.SurfaceViewRenderer
import org.webrtc.VideoCapturer
import java.math.BigInteger
import kotlin.random.Random
class ZLMRTCPusherImpl(val context: Context) : ZLMRTCPusher(),
PeerConnectionClient.PeerConnectionEvents {
private var peerConnectionClient: PeerConnectionClient? = null
private var eglBase: EglBase? = null
private var defaultFps = 24
private var surfaceViewRenderer: SurfaceViewRenderer? = null
private var localHandleId = BigInteger.valueOf(Random(2048).nextLong())
private var app: String = ""
private var streamId: String = ""
private fun initPeerConnectionClient(): PeerConnectionClient {
eglBase = EglBase.create()
return PeerConnectionClient(
context, eglBase,
PeerConnectionClient.PeerConnectionParameters(
true,
false,
false,
1280,
720,
defaultFps,
1024 * 1000 * 2,
"H264",
true,
true,
0,
"OPUS",
false,
false,
false,
false,
false,
false,
false,
false, false, false, null
), this
)
}
private fun createVideoCapture(context: Context?): VideoCapturer? {
val videoCapturer: VideoCapturer? = if (Camera2Enumerator.isSupported(context)) {
createCameraCapture(Camera2Enumerator(context))
} else {
createCameraCapture(Camera1Enumerator(true))
}
return videoCapturer
}
/**
* Create the camera video capturer
*/
private fun createCameraCapture(enumerator: CameraEnumerator): VideoCapturer? {
val deviceNames = enumerator.deviceNames
// First, try to find a front facing camera
for (deviceName in deviceNames) {
if (enumerator.isFrontFacing(deviceName)) {
val videoCapturer: VideoCapturer? = enumerator.createCapturer(deviceName, null)
if (videoCapturer != null) {
return videoCapturer
}
}
}
// Front facing camera not found, try something else
for (deviceName in deviceNames) {
if (!enumerator.isFrontFacing(deviceName)) {
val videoCapturer: VideoCapturer? = enumerator.createCapturer(deviceName, null)
if (videoCapturer != null) {
return videoCapturer
}
}
}
return null
}
private fun logger(msg: String) {
Log.i("ZLMRTCPusherImpl", msg)
}
override fun push(app: String, streamId: String) {
this.app = app
this.streamId = streamId
if (peerConnectionClient == null) peerConnectionClient = initPeerConnectionClient()
surfaceViewRenderer?.init(eglBase?.eglBaseContext, null)
peerConnectionClient?.setAudioEnabled(true)
peerConnectionClient?.setVideoEnabled(true)
peerConnectionClient?.createPeerConnectionFactory(PeerConnectionFactory.Options())
peerConnectionClient?.createPeerConnection(createVideoCapture(context), localHandleId)
peerConnectionClient?.createOffer(localHandleId)
}
override fun stop() {
surfaceViewRenderer?.clearImage()
surfaceViewRenderer?.release()
peerConnectionClient?.stopVideoSource()
peerConnectionClient?.close()
peerConnectionClient = null
}
override fun onLocalDescription(handleId: BigInteger?, sdp: SessionDescription?) {
val url = NativeLib().makePushUrl(app, streamId)
logger("handleId: $url")
logger("handleId: " + sdp?.description)
val doPost = HttpClient.doPost(
url,
mutableMapOf(Pair("sdp", sdp?.description)),
mutableMapOf()
)
val result = JSONObject(doPost)
val code = result.getInt("code")
if (code == 0) {
logger("handleId: $doPost")
val sdp = result.getString("sdp")
peerConnectionClient?.setRemoteDescription(
handleId,
SessionDescription(SessionDescription.Type.ANSWER, sdp)
)
} else {
val msg = result.getString("msg")
logger("handleId: $msg")
}
}
override fun onIceCandidate(handleId: BigInteger?, candidate: IceCandidate?) {
}
override fun onIceCandidatesRemoved(
handleId: BigInteger?,
candidates: Array<out IceCandidate>?
) {
}
override fun onIceConnected(handleId: BigInteger?) {
}
override fun onIceDisconnected(handleId: BigInteger?) {
}
override fun onPeerConnectionClosed(handleId: BigInteger?) {
}
override fun onPeerConnectionStatsReady(
handleId: BigInteger?,
reports: Array<out StatsReport>?
) {
}
override fun onPeerConnectionError(handleId: BigInteger?, description: String?) {
}
override fun onLocalRender(handleId: BigInteger?) {
if (handleId == localHandleId) {
peerConnectionClient?.setVideoRender(handleId, surfaceViewRenderer)
}
}
override fun onRemoteRender(handleId: BigInteger?) {
}
}
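End to end, pusher and player are symmetric: publishing with app=live, streamId=li makes the stream playable with the same ids. A loopback self-test sketch inside one Activity (assumes bind() is called before play(), as in PlayerDemoActivity):

    // Sketch: publish the camera, then play the same stream back.
    val pusher: ZLMRTCPusher = ZLMRTCPusherImpl(this)
    val player: ZLMRTCPlayer = ZLMRTCPlayerImpl(this)
    pusher.push("live", "li")                  // builds the type=push URL via makePushUrl
    player.bind(surface_view_renderer, false)  // render into the layout's SurfaceViewRenderer
    player.play("live", "li")                  // builds the type=play URL via makePlayUrl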