desc: Push stream to RTSP server

main
xiaowusky 2 years ago
parent d5eee6f9db
commit ac769de55d

@@ -73,11 +73,4 @@ object PushHelper {
)
mPusher.start(ip, port, String.format("%s.sdp", id), Pusher.TransType.EASY_RTP_OVER_TCP)
}
@JvmField
var mPpsSps = ByteArray(0)
@JvmField
var h264 = ByteArray(1920 * 1080)
}
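For context, the stream announced here with String.format("%s.sdp", id) is normally played back from the same host and port it was pushed to. A minimal sketch of how a client-side URL could be derived, assuming the EasyDarwin convention of exposing pushed streams under rtsp://&lt;host&gt;:&lt;port&gt;/&lt;name&gt; (the helper below is illustrative, not part of this commit):

    // Illustrative only: builds the playback URL for a stream pushed as "<id>.sdp",
    // assuming the RTSP server serves pushed streams directly under its root path.
    public static String playbackUrl(String host, int port, String id) {
        return String.format("rtsp://%s:%d/%s.sdp", host, port, id);
    }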

@@ -7,10 +7,14 @@ import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.util.Log;
import com.yinuo.library.vlc.PushHelper;
import com.yinuo.library.vlc.TaskUtils;
import com.yinuo.library.vlc.utils.LogUtils;
import org.easydarwin.easypusher.BuildConfig;
import java.io.IOException;
import java.nio.ByteBuffer;
@@ -30,13 +34,13 @@ public class AudioEncoderCore extends MediaEncoderCore implements Runnable {
protected static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
private int mSampleRate = 44100;
private int mSampleRate = 8000;
private int mChannelCount = 1;
private int mBitRate = 128000;
private int mBitRate = 16000;
private int mMaxInputSize = 16384;
private int mMaxInputSize = 1920;
private AudioRecord mAudioRecord;
@@ -92,20 +96,20 @@ public class AudioEncoderCore extends MediaEncoderCore implements Runnable {
mChannelConfig, AUDIO_FORMAT);
mAudioRecord = new AudioRecord(
MediaRecorder.AudioSource.CAMCORDER, // source
MediaRecorder.AudioSource.MIC, // source
mSampleRate, // sample rate, hz
mChannelConfig, // channels
AUDIO_FORMAT, // audio format
minBufferSize * 4); // buffer size (bytes)
minBufferSize); // buffer size (bytes)
}
@Override
public void start() {
if (!mRecording) {
mRecording = true;
mAudioRecord.startRecording();
TaskUtils.execute(this);
// mRecording = true;
// mAudioRecord.startRecording();
//
// TaskUtils.execute(this);
}
}
@@ -152,6 +156,89 @@ public class AudioEncoderCore extends MediaEncoderCore implements Runnable {
mEncoder.queueInputBuffer(bufferIndex, 0, len, ptsUs, 0);
}
}
}
@Override
public void drainEncoder(boolean endOfStream) {
final int TIMEOUT_USEC = 10000;
if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
if (endOfStream && isSurfaceInput()) {
if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}
ByteBuffer mBuffer = ByteBuffer.allocate(10240);
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
} else {
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
// mTrackIndex = mMuxer.addTrack(newFormat);
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if (!mMuxer.isStarted()) {
mBufferInfo.size = 0;
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
encodedData.get(mBuffer.array(), 7, mBufferInfo.size);
encodedData.clear();
mBuffer.position(7 + mBufferInfo.size);
addADTStoPacket(mBuffer.array(), mBufferInfo.size + 7);
mBuffer.flip();
PushHelper.INSTANCE.pushData(mBuffer.array(), mBufferInfo.size + 7, mBufferInfo.presentationTimeUs / 1000);
if (BuildConfig.DEBUG)
Log.i(TAG, String.format("push audio stamp:%d", mBufferInfo.presentationTimeUs / 1000));
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
}
}
}
private void addADTStoPacket(byte[] packet, int packetLen) {
packet[0] = (byte) 0xFF;
packet[1] = (byte) 0xF1;
packet[2] = (byte) (((2 - 1) << 6) + (11 << 2) + (1 >> 2));
packet[3] = (byte) (((1 & 3) << 6) + (packetLen >> 11));
packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);
packet[6] = (byte) 0xFC;
}
}
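The constants in addADTStoPacket above pin the ADTS header to AAC LC (profile 2), sampling-frequency index 11 (8000 Hz, matching mSampleRate) and a single channel. A parameterised sketch of the same 7-byte header, assuming MPEG-4 syntax with no CRC; the method name and signature are illustrative, not part of this commit:

    // Illustrative ADTS header builder: 7 bytes, syncword 0xFFF, MPEG-4, layer 0, no CRC.
    // profile: 2 = AAC LC; freqIdx: 11 = 8000 Hz; chanCfg: 1 = mono; packetLen includes the 7-byte header.
    static void fillAdtsHeader(byte[] packet, int packetLen, int profile, int freqIdx, int chanCfg) {
        packet[0] = (byte) 0xFF;                                           // syncword (high 8 bits)
        packet[1] = (byte) 0xF1;                                           // syncword low bits + MPEG-4 + layer 0 + no CRC
        packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
        packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11));
        packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
        packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);                // frame length low bits + buffer fullness
        packet[6] = (byte) 0xFC;                                           // buffer fullness cont. + one raw data block
    }

With profile = 2, freqIdx = 11 and chanCfg = 1 this produces exactly the bytes written by addADTStoPacket above.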

@@ -2,13 +2,16 @@ package com.yinuo.library.vlc.encoder;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import com.yinuo.library.vlc.PushHelper;
import com.yinuo.library.vlc.utils.LogUtils;
import org.easydarwin.easypusher.BuildConfig;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
@@ -51,9 +54,12 @@ public abstract class MediaEncoderCore {
* We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
* not recording audio.
*/
long timeStamp;
public void drainEncoder(boolean endOfStream) {
// LogUtils.v(String.format("%s drainEncoder: end = %b", getClass().getSimpleName(), endOfStream));
buildKeyFrame();
final int TIMEOUT_USEC = 10000;
if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
@@ -61,108 +67,103 @@ public abstract class MediaEncoderCore {
if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}
byte[] mSpsPps = new byte[0];
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
while (true) {
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
} else {
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if (!mMuxer.isStarted()) {
mBufferInfo.size = 0;
}
if (!mMuxer.isStarted()) {
mBufferInfo.size = 0;
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
if (mBufferInfo.size != 0) {
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if (VERBOSE) {
Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
mBufferInfo.presentationTimeUs);
}
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if (VERBOSE) {
Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
mBufferInfo.presentationTimeUs);
}
// add
boolean sync = false;
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {// sps
sync = (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
if (!sync) {
byte[] temp = new byte[mBufferInfo.size];
encodedData.get(temp);
PushHelper.mPpsSps = temp;
mEncoder.releaseOutputBuffer(encoderStatus, false);
continue;
} else {
PushHelper.mPpsSps = new byte[0];
}
}
sync |= (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
int len = PushHelper.mPpsSps.length + mBufferInfo.size;
if (len > PushHelper.h264.length) {
PushHelper.h264 = new byte[len];
}
if (sync) {
System.arraycopy(PushHelper.mPpsSps, 0, PushHelper.h264, 0, PushHelper.mPpsSps.length);
encodedData.get(PushHelper.h264, PushHelper.mPpsSps.length, mBufferInfo.size);
PushHelper.INSTANCE.pushData(PushHelper.h264, PushHelper.mPpsSps.length + mBufferInfo.size, mBufferInfo.presentationTimeUs / 1000);
if (BuildConfig.DEBUG)
Log.i(TAG, String.format("push i video stamp:%d", mBufferInfo.presentationTimeUs / 1000));
} else {
encodedData.get(PushHelper.h264, 0, mBufferInfo.size);
PushHelper.INSTANCE.pushData(PushHelper.h264, mBufferInfo.size, mBufferInfo.presentationTimeUs / 1000);
if (BuildConfig.DEBUG)
Log.i(TAG, String.format("push video stamp:%d", mBufferInfo.presentationTimeUs / 1000));
byte[] outData = new byte[mBufferInfo.size];
// Read the encoded data from the buffer into outData
encodedData.get(outData);
// Cache the SPS/PPS: the codec config data starts with 0x00 0x00 0x00 0x01 0x67
// (0x67 is 103 in decimal)
if (outData[0] == 0 && outData[1] == 0 && outData[2] == 0
&& outData[3] == 1 && outData[4] == 103) {
mSpsPps = outData;
} else if (outData[0] == 0 && outData[1] == 0 && outData[2] == 0
&& outData[3] == 1 && outData[4] == 101) {
// A key frame starts with 0x00 0x00 0x00 0x01 0x65 (0x65 is 101 in decimal);
// prepend the cached SPS/PPS data to the key frame
byte[] iframeData = new byte[mSpsPps.length + outData.length];
System.arraycopy(mSpsPps, 0, iframeData, 0, mSpsPps.length);
System.arraycopy(outData, 0, iframeData, mSpsPps.length, outData.length);
outData = iframeData;
}
//save(outData, 0, outData.length, Environment.getExternalStorageDirectory() + "/easy.h264", true);
PushHelper.INSTANCE.pushData(outData, outData.length, mBufferInfo.presentationTimeUs / 1000);
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
break; // out of while
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
}
}
}
private void buildKeyFrame() {
if (System.currentTimeMillis() - timeStamp >= 1000) { // set the parameter again after 1000 ms
timeStamp = System.currentTimeMillis();
if (Build.VERSION.SDK_INT >= 23) {
Bundle params = new Bundle();
params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
mEncoder.setParameters(params);
}
}
}
public void release() {
LogUtils.v(String.format("%s.release", getClass().getSimpleName()));
@@ -179,4 +180,35 @@ public abstract class MediaEncoderCore {
protected abstract boolean isSurfaceInput();
/**
 * Dumps a byte range to a file; useful for saving the raw H.264 stream for offline inspection.
 *
 * @param buffer source data
 * @param offset start offset within buffer
 * @param length number of bytes to write
 * @param path   destination file path
 * @param append true to append to an existing file, false to overwrite it
 */
public static void save(byte[] buffer, int offset, int length, String path, boolean append) {
FileOutputStream fos = null;
try {
fos = new FileOutputStream(path, append);
fos.write(buffer, offset, length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (fos != null) {
try {
fos.flush();
fos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
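The literals 103 and 101 compared above are the full NAL header bytes 0x67 and 0x65, which also carry nal_ref_idc; only the low five bits are the NAL unit type. A slightly more general check, assuming Annex-B output with a 4-byte start code as in the code above (helper names are illustrative, not part of this commit):

    // Illustrative NAL-type check for Annex-B H.264 with a 4-byte start code (00 00 00 01).
    // nal_unit_type is the low 5 bits of the byte after the start code:
    // 7 = SPS, 8 = PPS, 5 = IDR slice, 1 = non-IDR slice.
    static int nalUnitType(byte[] frame) {
        return frame[4] & 0x1F;
    }

    static boolean isSpsPpsConfig(byte[] frame) { return nalUnitType(frame) == 7; }
    static boolean isIdrFrame(byte[] frame)     { return nalUnitType(frame) == 5; }

MediaCodec typically delivers SPS and PPS together in the BUFFER_FLAG_CODEC_CONFIG buffer, so checking the first NAL (the SPS) is usually enough to cache the whole config blob, mirroring the outData[4] == 103 test above.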
