desc: add H.265 (HEVC) video encoding support

main
xiaowusky 2 years ago
parent 64a7cafe3f
commit 91054ba593

@ -11,6 +11,7 @@ import com.common.commonlib.db.entity.GasType
import com.common.commonlib.db.entity.Warning import com.common.commonlib.db.entity.Warning
import com.common.commonlib.net.bean.BaseResponse import com.common.commonlib.net.bean.BaseResponse
import com.common.commonlib.net.callback.RequestResultCallBack import com.common.commonlib.net.callback.RequestResultCallBack
import com.yinuo.library.vlc.PushHelper
import com.yinuo.library.vlc.TxtOverlay import com.yinuo.library.vlc.TxtOverlay
import com.yinuo.safetywatcher.watcher.net.api.UploadFileApi import com.yinuo.safetywatcher.watcher.net.api.UploadFileApi
import com.yinuo.safetywatcher.watcher.ui.view.CommonDialog import com.yinuo.safetywatcher.watcher.ui.view.CommonDialog
@ -27,6 +28,7 @@ object TestUtils {
fun insertData() { fun insertData() {
TxtOverlay.setShowTip("co2:50%@co:44%") TxtOverlay.setShowTip("co2:50%@co:44%")
PushHelper.setPushUrl("rtsp://192.168.5.16:554/123")
GlobalScope.launch() { GlobalScope.launch() {
// 插入类型 // 插入类型
val typeDao = DBUtils.gasTypeDao() val typeDao = DBUtils.gasTypeDao()

@ -50,6 +50,6 @@ object LztekUtil {
fun setRtspUrl(str: String?) { fun setRtspUrl(str: String?) {
this.rtspUrl = str this.rtspUrl = str
rtspUrl?.let { PushHelper.setPushUrl(it) } // rtspUrl?.let { PushHelper.setPushUrl(it) }
} }
} }

@ -50,11 +50,14 @@ object PushHelper {
mIp = mUri.host mIp = mUri.host
mPort = mUri.port.toString() mPort = mUri.port.toString()
mId = mUri.path mId = mUri.path
if (mId?.startsWith("/")!!){
mId = mId!!.substring(1)
}
} }
fun startStream() { fun startStream(hevc: Boolean) {
stop() stop()
initHelper() initHelper(hevc)
} }
fun stop() { fun stop() {
@ -74,14 +77,14 @@ object PushHelper {
mPusher.push(h264, 0, length, timeStamp, 1) mPusher.push(h264, 0, length, timeStamp, 1)
} }
private fun initHelper() { private fun initHelper(hevc: Boolean) {
if (mIp.isNullOrEmpty() || mPort.isNullOrEmpty() || mId.isNullOrEmpty()) { if (mIp.isNullOrEmpty() || mPort.isNullOrEmpty() || mId.isNullOrEmpty()) {
LogUtils.e("PushHelper error, please setPushUrl first!!") LogUtils.e("PushHelper error, please setPushUrl first!!")
return return
} }
mPusher.initPush(mApplicationContext, callback) mPusher.initPush(mApplicationContext, callback)
mPusher.setMediaInfo( mPusher.setMediaInfo(
Pusher.Codec.EASY_SDK_VIDEO_CODEC_H264, if (hevc) Pusher.Codec.EASY_SDK_VIDEO_CODEC_H265 else Pusher.Codec.EASY_SDK_VIDEO_CODEC_H264,
24, 24,
Pusher.Codec.EASY_SDK_AUDIO_CODEC_AAC, Pusher.Codec.EASY_SDK_AUDIO_CODEC_AAC,
1, 1,

@ -74,7 +74,7 @@ public class RtspSurfaceRender implements GLSurfaceView.Renderer, RtspHelper.Rts
LogUtils.v(String.format("onSurfaceChanged: width = %d, height = %d", width, height)); LogUtils.v(String.format("onSurfaceChanged: width = %d, height = %d", width, height));
mProgram = new RGBProgram(mGLSurfaceView.getContext(), width, height); mProgram = new RGBProgram(mGLSurfaceView.getContext(), width, height);
mBuffer = ByteBuffer.allocateDirect(width * height * 4).order(ByteOrder.nativeOrder()); mBuffer = ByteBuffer.allocateDirect(width * height * 4).order(ByteOrder.nativeOrder());
mVideoEncoder = new MovieEncoder1(mGLSurfaceView.getContext(), width, height); mVideoEncoder = new MovieEncoder1(mGLSurfaceView.getContext(), width, height, false);
RtspHelper.getInstance().createPlayer(mRtspUrl, width, height, this); RtspHelper.getInstance().createPlayer(mRtspUrl, width, height, this);
} }

@ -74,7 +74,7 @@ public class RtspSurfaceRender2 implements RtspHelper.RtspCallback {
mheight = height; mheight = height;
if (mVideoEncoder == null) { if (mVideoEncoder == null) {
LogUtils.v("init"); LogUtils.v("init");
mVideoEncoder = new MovieEncoder1(mSurfaceView.getContext(), width, height); mVideoEncoder = new MovieEncoder1(mSurfaceView.getContext(), width, height, true);
mVideoBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); mVideoBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
RtspHelper.getInstance().createPlayer(mRtspUrl, width, height, RtspSurfaceRender2.this); RtspHelper.getInstance().createPlayer(mRtspUrl, width, height, RtspSurfaceRender2.this);
renderThread.start(); renderThread.start();

@ -8,6 +8,7 @@ import android.media.MediaCodecInfo;
import android.media.MediaFormat; import android.media.MediaFormat;
import android.media.MediaRecorder; import android.media.MediaRecorder;
import android.util.Log; import android.util.Log;
import android.view.Surface;
import com.yinuo.library.vlc.PushHelper; import com.yinuo.library.vlc.PushHelper;
import com.yinuo.library.vlc.TaskUtils; import com.yinuo.library.vlc.TaskUtils;
@ -118,6 +119,11 @@ public class AudioEncoderCore extends MediaEncoderCore implements Runnable {
mRecording = false; mRecording = false;
} }
@Override
public Surface getInputSurface() {
return null;
}
@Override @Override
protected boolean isSurfaceInput() { protected boolean isSurfaceInput() {
return false; return false;

@ -25,7 +25,6 @@ import android.os.Message;
import com.yinuo.library.vlc.utils.LogUtils; import com.yinuo.library.vlc.utils.LogUtils;
import java.io.File;
import java.lang.ref.WeakReference; import java.lang.ref.WeakReference;
/** /**
@ -64,7 +63,7 @@ public abstract class BaseMovieEncoder implements Runnable {
private WindowSurface mInputWindowSurface; private WindowSurface mInputWindowSurface;
private EglCore mEglCore; private EglCore mEglCore;
private VideoEncoderCore mVideoEncoder; private MediaEncoderCore mVideoEncoder;
private AudioEncoderCore mAudioEncoder; private AudioEncoderCore mAudioEncoder;
@ -79,10 +78,13 @@ public abstract class BaseMovieEncoder implements Runnable {
protected int mWidth, mHeight; protected int mWidth, mHeight;
public BaseMovieEncoder(Context context, int width, int height) { private boolean useHevc = false;
public BaseMovieEncoder(Context context, int width, int height, boolean hevc) {
mContext = context; mContext = context;
mWidth = width; mWidth = width;
mHeight = height; mHeight = height;
useHevc = hevc;
} }
/** /**
@ -308,7 +310,12 @@ public abstract class BaseMovieEncoder implements Runnable {
mWidth = width; mWidth = width;
mHeight = height; mHeight = height;
mVideoEncoder = new VideoEncoderCore(muxer, width, height); if (useHevc){
mVideoEncoder = new VideoEncoderCoreHevc(muxer, width, height);
}else {
mVideoEncoder = new VideoEncoderCoreAvc(muxer, width, height);
}
mAudioEncoder = new AudioEncoderCore(muxer); mAudioEncoder = new AudioEncoderCore(muxer);
mEglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE); mEglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);

@ -1,18 +1,15 @@
package com.yinuo.library.vlc.encoder; package com.yinuo.library.vlc.encoder;
import android.media.MediaCodec; import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Build; import android.os.Build;
import android.os.Bundle; import android.os.Bundle;
import android.util.Log; import android.view.Surface;
import com.yinuo.library.vlc.PushHelper;
import com.yinuo.library.vlc.utils.LogUtils; import com.yinuo.library.vlc.utils.LogUtils;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.FileOutputStream; import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer;
/** /**
* Created by liwentian on 17/8/1. * Created by liwentian on 17/8/1.
@ -56,104 +53,11 @@ public abstract class MediaEncoderCore {
*/ */
long timeStamp; long timeStamp;
public void drainEncoder(boolean endOfStream) { public abstract Surface getInputSurface();
// LogUtils.v(String.format("%s drainEncoder: end = %b", getClass().getSimpleName(), endOfStream));
buildKeyFrame(); public abstract void drainEncoder(boolean endOfStream);
final int TIMEOUT_USEC = 10000;
if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
if (endOfStream && isSurfaceInput()) { void buildKeyFrame() {
if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}
byte[] mSpsPps = new byte[0];
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
} else {
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if (!mMuxer.isStarted()) {
mBufferInfo.size = 0;
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if (VERBOSE) {
Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
mBufferInfo.presentationTimeUs);
}
byte[] outData = new byte[mBufferInfo.size];
//从buff中读取数据到outData中
encodedData.get(outData);
//记录pps和spspps和sps数据开头是0x00 0x00 0x00 0x01 0x67
// 0x67对应十进制103
if (outData[0] == 0 && outData[1] == 0 && outData[2] == 0
&& outData[3] == 1 && outData[4] == 103) {
mSpsPps = outData;
} else if (outData[0] == 0 && outData[1] == 0 && outData[2] == 0
&& outData[3] == 1 && outData[4] == 101) {
//关键帧开始规则是0x00 0x00 0x00 0x01 0x650x65对应十进制101
//在关键帧前面加上pps和sps数据
byte[] iframeData = new byte[mSpsPps.length + outData.length];
System.arraycopy(mSpsPps, 0, iframeData, 0, mSpsPps.length);
System.arraycopy(outData, 0, iframeData, mSpsPps.length, outData.length);
outData = iframeData;
}
//save(outData, 0, outData.length, Environment.getExternalStorageDirectory() + "/easy.h264", true);
PushHelper.INSTANCE.pushData(outData, outData.length, mBufferInfo.presentationTimeUs / 1000);
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
}
}
}
private void buildKeyFrame() {
if (System.currentTimeMillis() - timeStamp >= 1000) {//1000毫秒后设置参数 if (System.currentTimeMillis() - timeStamp >= 1000) {//1000毫秒后设置参数
timeStamp = System.currentTimeMillis(); timeStamp = System.currentTimeMillis();
if (Build.VERSION.SDK_INT >= 23) { if (Build.VERSION.SDK_INT >= 23) {

@ -16,8 +16,8 @@ public class MovieEncoder1 extends BaseMovieEncoder {
private volatile RGBProgram mRGBProgram; private volatile RGBProgram mRGBProgram;
private volatile ByteBuffer mYUVBuffer; private volatile ByteBuffer mYUVBuffer;
public MovieEncoder1(Context context, int width, int height) { public MovieEncoder1(Context context, int width, int height, boolean hevc) {
super(context, width, height); super(context, width, height, hevc);
} }
@Override @Override

@ -1,114 +0,0 @@
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;
import java.io.IOException;
/**
* This class wraps up the core components used for surface-input video encoding.
* <p>
* Once created, frames are fed to the input surface. Remember to provide the presentation
* time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
* producer side doesn't get backed up.
* <p>
* This class is not thread-safe, with one exception: it is valid to use the input surface
* on one thread, and drain the output on a different thread.
*/
public class VideoEncoderCore extends MediaEncoderCore {

    // TODO: these ought to be configurable as well
    private static final String MIME_TYPE = "video/avc";    // H.264 Advanced Video Coding
    private static final int FRAME_RATE = 24;               // encoder frame-rate hint, frames per second
    private static final int IFRAME_INTERVAL = 1;           // seconds between I-frames
    private static final int BIT_RATE = 4000000;            // target bitrate, 4 Mbps

    // Surface created by the MediaCodec encoder; frames rendered into it are encoded.
    private Surface mInputSurface;

    /**
     * Configures encoder and muxer state, and prepares the input Surface.
     */
    public VideoEncoderCore(AndroidMuxer muxer, int width, int height) {
        super(muxer);
        prepareEncoder(width, height);

        // Create a MediaMuxer. We can't add the video track and start() the muxer here,
        // because our MediaFormat doesn't have the Magic Goodies. These can only be
        // obtained from the encoder after it has started processing data.
        //
        // We're not actually interested in multiplexing audio. We just want to convert
        // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
        // mMuxer = new MediaMuxer(outputFile.toString(),
        //        MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    }

    /**
     * Creates and starts a surface-input H.264 MediaCodec encoder for the given
     * frame size. Must be called before {@link #getInputSurface()} is used.
     */
    private void prepareEncoder(int width, int height) {
        mBufferInfo = new MediaCodec.BufferInfo();

        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);

        // Create a MediaCodec encoder, and configure it with our format. Get a Surface
        // we can use for input and wrap it with a class that handles the EGL work.
        try {
            mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        } catch (IOException e) {
            // NOTE(review): swallowing IOException leaves mEncoder null and the next
            // line will NPE; kept as-is because this is a documentation-only pass.
            e.printStackTrace();
        }
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mInputSurface = mEncoder.createInputSurface();
        mEncoder.start();
    }

    /**
     * Returns the encoder's input surface.
     */
    public Surface getInputSurface() {
        return mInputSurface;
    }

    // Starts draining: a non-EOS drain pass over any pending encoder output.
    @Override
    public void start() {
        drainEncoder(false);
    }

    // Signals end-of-stream and drains the remaining encoder output.
    @Override
    public void stop() {
        drainEncoder(true);
    }

    // This encoder is fed via its input Surface rather than input buffers.
    @Override
    protected boolean isSurfaceInput() {
        return true;
    }
}

@ -0,0 +1,205 @@
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;
import com.yinuo.library.vlc.PushHelper;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* This class wraps up the core components used for surface-input video encoding.
* <p>
* Once created, frames are fed to the input surface. Remember to provide the presentation
* time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
* producer side doesn't get backed up.
* <p>
* This class is not thread-safe, with one exception: it is valid to use the input surface
* on one thread, and drain the output on a different thread.
*/
public class VideoEncoderCoreAvc extends MediaEncoderCore {

    // TODO: these ought to be configurable as well
    private static final String MIME_TYPE = "video/avc";    // H.264 Advanced Video Coding
    private static final int FRAME_RATE = 24;               // encoder frame-rate hint, frames per second
    private static final int IFRAME_INTERVAL = 1;           // seconds between I-frames
    private static final int BIT_RATE = 4000000;            // target bitrate, 4 Mbps

    // H.264 NAL unit types (low 5 bits of the NAL header byte).
    private static final int NAL_SPS = 7;   // sequence parameter set
    private static final int NAL_IDR = 5;   // IDR (key-frame) slice

    // Surface created by the MediaCodec encoder; frames rendered into it are encoded.
    private Surface mInputSurface;

    // Most recent in-band SPS/PPS buffer. This must be a FIELD: the original code
    // declared it as a local inside drainEncoder(), so parameter sets captured in
    // one drain call were thrown away before the key frame that needs them arrived
    // in a later call, and key frames were pushed without SPS/PPS.
    private byte[] mSpsPps = new byte[0];

    /**
     * Configures the H.264 encoder, prepares the input Surface, and starts the
     * push stream in AVC mode.
     */
    public VideoEncoderCoreAvc(AndroidMuxer muxer, int width, int height) {
        super(muxer);
        prepareEncoder(width, height);
        PushHelper.INSTANCE.startStream(false);
    }

    /**
     * Creates and starts a surface-input H.264 MediaCodec encoder for the given
     * frame size.
     */
    private void prepareEncoder(int width, int height) {
        mBufferInfo = new MediaCodec.BufferInfo();

        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);

        // Create a MediaCodec encoder, and configure it with our format. Get a Surface
        // we can use for input and wrap it with a class that handles the EGL work.
        try {
            mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        } catch (IOException e) {
            e.printStackTrace();
        }
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mInputSurface = mEncoder.createInputSurface();
        mEncoder.start();
    }

    /**
     * Drains one pending output buffer from the encoder: feeds it to the muxer
     * (once started) and pushes the elementary stream to {@link PushHelper},
     * prepending the cached SPS/PPS to IDR frames so late-joining clients can sync.
     *
     * @param endOfStream true to signal EOS to the encoder and flush its tail output
     */
    @Override
    public void drainEncoder(boolean endOfStream) {
        buildKeyFrame();
        final int TIMEOUT_USEC = 10000;
        if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

        if (endOfStream && isSurfaceInput()) {
            if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
            mEncoder.signalEndOfInputStream();
        }

        ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);

        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
            } else {
                if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat);

            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = mMuxer.addTrack(newFormat);
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                    encoderStatus);
            // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
            }

            if (!mMuxer.isStarted()) {
                mBufferInfo.size = 0;
            }

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }

            if (mBufferInfo.size != 0) {
                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE) {
                    Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
                            mBufferInfo.presentationTimeUs);
                }

                byte[] outData = new byte[mBufferInfo.size];
                encodedData.get(outData);

                if (outData.length > 4) {
                    // Annex-B start code is 00 00 01 or 00 00 00 01; the NAL unit type
                    // is the low 5 bits of the byte that follows it. The original
                    // compared the whole header byte to 0x67/0x65, which misses NALs
                    // with a different nal_ref_idc and 3-byte start codes.
                    int offset = (outData[2] == 0x01) ? 3 : 4;
                    int nalType = outData[offset] & 0x1F;
                    if (nalType == NAL_SPS) {
                        // Cache SPS (presumably with PPS in the same buffer — TODO confirm
                        // for the target encoder) for prepending to key frames.
                        mSpsPps = outData;
                    } else if (nalType == NAL_IDR && mSpsPps.length > 0) {
                        // Prepend SPS/PPS to the IDR slice so decoders can sync on it.
                        byte[] iframeData = new byte[mSpsPps.length + outData.length];
                        System.arraycopy(mSpsPps, 0, iframeData, 0, mSpsPps.length);
                        System.arraycopy(outData, 0, iframeData, mSpsPps.length, outData.length);
                        outData = iframeData;
                    }
                }
                //save(outData, 0, outData.length, Environment.getExternalStorageDirectory() + "/easy.h264", true);
                PushHelper.INSTANCE.pushData(outData, outData.length, mBufferInfo.presentationTimeUs / 1000);
            }

            mEncoder.releaseOutputBuffer(encoderStatus, false);

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) Log.d(TAG, "end of stream reached");
                }
            }
        }
    }

    /**
     * Returns the encoder's input surface.
     */
    @Override
    public Surface getInputSurface() {
        return mInputSurface;
    }

    // Starts draining: a non-EOS drain pass over any pending encoder output.
    @Override
    public void start() {
        drainEncoder(false);
    }

    // Signals end-of-stream and drains the remaining encoder output.
    @Override
    public void stop() {
        drainEncoder(true);
    }

    // This encoder is fed via its input Surface rather than input buffers.
    @Override
    protected boolean isSurfaceInput() {
        return true;
    }
}

@ -0,0 +1,205 @@
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;
import com.yinuo.library.vlc.PushHelper;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* This class wraps up the core components used for surface-input video encoding.
* <p>
* Once created, frames are fed to the input surface. Remember to provide the presentation
* time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
* producer side doesn't get backed up.
* <p>
* This class is not thread-safe, with one exception: it is valid to use the input surface
* on one thread, and drain the output on a different thread.
*/
public class VideoEncoderCoreHevc extends MediaEncoderCore {

    // TODO: these ought to be configurable as well
    private static final String MIME_TYPE = "video/hevc";   // H.265 High Efficiency Video Coding
    private static final int FRAME_RATE = 24;               // encoder frame-rate hint, frames per second
    private static final int IFRAME_INTERVAL = 1;           // seconds between I-frames
    private static final int BIT_RATE = 4000000;            // target bitrate, 4 Mbps

    // H.265 NAL unit types (bits 6..1 of the first header byte after the start code).
    public static final int NAL_I = 19;          // IDR_W_RADL key-frame slice
    public static final int NAL_IDR_N_LP = 20;   // IDR_N_LP key-frame slice
    public static final int NAL_VPS = 32;        // video parameter set

    // Surface created by the MediaCodec encoder; frames rendered into it are encoded.
    private Surface mInputSurface;

    // Most recent in-band parameter-set buffer (the buffer that starts with a VPS;
    // presumably the encoder emits VPS/SPS/PPS together — TODO confirm). Prepended
    // to key frames so late-joining clients can start decoding.
    private byte[] vps_sps_pps_buf;

    /**
     * Configures the H.265 encoder, prepares the input Surface, and starts the
     * push stream in HEVC mode.
     */
    public VideoEncoderCoreHevc(AndroidMuxer muxer, int width, int height) {
        super(muxer);
        prepareEncoder(width, height);
        PushHelper.INSTANCE.startStream(true);
    }

    /**
     * Creates and starts a surface-input H.265 MediaCodec encoder for the given
     * frame size.
     */
    private void prepareEncoder(int width, int height) {
        mBufferInfo = new MediaCodec.BufferInfo();

        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);

        // Create a MediaCodec encoder, and configure it with our format. Get a Surface
        // we can use for input and wrap it with a class that handles the EGL work.
        try {
            mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        } catch (IOException e) {
            e.printStackTrace();
        }
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mInputSurface = mEncoder.createInputSurface();
        mEncoder.start();
    }

    /**
     * Returns the encoder's input surface.
     */
    @Override
    public Surface getInputSurface() {
        return mInputSurface;
    }

    /**
     * Drains one pending output buffer from the encoder: feeds it to the muxer
     * (once started) and pushes the elementary stream to {@link PushHelper},
     * prepending the cached VPS/SPS/PPS to key frames so late-joining clients can sync.
     *
     * @param endOfStream true to signal EOS to the encoder and flush its tail output
     */
    @Override
    public void drainEncoder(boolean endOfStream) {
        buildKeyFrame();
        final int TIMEOUT_USEC = 10000;
        if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");

        if (endOfStream && isSurfaceInput()) {
            if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
            mEncoder.signalEndOfInputStream();
        }

        ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);

        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
            } else {
                if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat);

            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = mMuxer.addTrack(newFormat);
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                    encoderStatus);
            // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
            }

            if (!mMuxer.isStarted()) {
                mBufferInfo.size = 0;
            }

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }

            if (mBufferInfo.size != 0) {
                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE) {
                    Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
                            mBufferInfo.presentationTimeUs);
                }

                byte[] outData = new byte[mBufferInfo.size];
                encodedData.get(outData);

                if (outData.length > 4) {
                    // Annex-B start code is 00 00 01 or 00 00 00 01.
                    int offset = (outData[2] == 0x01) ? 3 : 4;
                    int type = (outData[offset] & 0x7E) >> 1;
                    if (type == NAL_VPS) {
                        vps_sps_pps_buf = outData;
                    } else if ((type == NAL_I || type == NAL_IDR_N_LP) && vps_sps_pps_buf != null) {
                        // Null-guard: the original dereferenced vps_sps_pps_buf
                        // unconditionally and threw NullPointerException when a key
                        // frame arrived before the first VPS. Also handle IDR_N_LP (20),
                        // which is a key frame the original missed.
                        byte[] newBuf = new byte[vps_sps_pps_buf.length + outData.length];
                        System.arraycopy(vps_sps_pps_buf, 0, newBuf, 0, vps_sps_pps_buf.length);
                        System.arraycopy(outData, 0, newBuf, vps_sps_pps_buf.length, outData.length);
                        outData = newBuf;
                    }
                }
                PushHelper.INSTANCE.pushData(outData, outData.length, mBufferInfo.presentationTimeUs / 1000);
            }

            mEncoder.releaseOutputBuffer(encoderStatus, false);

            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) Log.d(TAG, "end of stream reached");
                }
            }
        }
    }

    // Starts draining: a non-EOS drain pass over any pending encoder output.
    @Override
    public void start() {
        drainEncoder(false);
    }

    // Signals end-of-stream and drains the remaining encoder output.
    @Override
    public void stop() {
        drainEncoder(true);
    }

    // This encoder is fed via its input Surface rather than input buffers.
    @Override
    protected boolean isSurfaceInput() {
        return true;
    }
}
Loading…
Cancel
Save