desc: Introduce VLC

main
xiaowusky 2 years ago
parent 5f64b44e5f
commit c25ebcc6e4

@ -15,7 +15,7 @@ android {
ndk {
// native .so ABI filters
abiFilters "arm64-v8a"
abiFilters "armeabi-v7a"
}
}
@ -36,10 +36,6 @@ dependencies {
implementation fileTree(include: ['*.jar'], dir: 'libs')
implementation 'androidx.appcompat:appcompat:1.4.1'
implementation 'androidx.constraintlayout:constraintlayout:2.1.3'
implementation(name: 'libuvccamera-release', ext: 'aar') {
exclude module: 'support-v4'
exclude module: 'appcompat-v7'
}
implementation 'io.reactivex.rxjava2:rxjava:2.1.6'
implementation 'io.reactivex.rxjava2:rxandroid:2.0.1'
implementation 'com.github.bumptech.glide:glide:4.12.0'
@ -50,6 +46,8 @@ dependencies {
implementation project(path: ':library-ijkplayer')
implementation project(path: ':library-serialPort')
implementation project(path: ':library-common')
implementation project(path: ':library-vlc')
implementation(name: 'libvlc-3.0.0', ext: 'aar')
//excel
implementation rootProject.ext.dependencies.jxl
//

Binary file not shown.

@ -8,3 +8,6 @@ const val MAX_QUERY_TIME_INTERVAL = ONE_DAY_MS
// Default query time interval
const val DEFAULT_QUERY_TIME_INTERVAL = ONE_HOUR_MS * 6
// Camera RTSP stream URL
const val CAMERA_URL = "rtsp://admin:Aa123456@192.168.5.200:554/Streaming/Channels/101"

@ -33,7 +33,7 @@ class HistoryVideoActivity : BaseActivity() {
override fun getOptions(): List<CommonTopBar.Option>? {
val ops = mutableListOf<CommonTopBar.Option>()
ops.add(CommonTopBar.Option(0, getString(R.string.search)))
// ops.add(CommonTopBar.Option(0, getString(R.string.search)))
ops.add(CommonTopBar.Option(1, getString(R.string.export)))
return ops
}

@ -1,11 +1,14 @@
package com.yinuo.safetywatcher.watcher.ui
import android.content.Intent
import android.opengl.GLSurfaceView
import android.os.Build
import android.view.View
import androidx.annotation.RequiresApi
import com.yinuo.library.vlc.RtspSurfaceRender
import com.yinuo.safetywatcher.databinding.ActivityHomeBinding
import com.yinuo.safetywatcher.watcher.base.NoOptionsActivity
import com.yinuo.safetywatcher.watcher.constant.CAMERA_URL
import com.yinuo.safetywatcher.watcher.services.HeartbeatService
import com.yinuo.safetywatcher.watcher.utils.BatteryHelper
import com.yinuo.safetywatcher.watcher.utils.SimHelper
@ -29,6 +32,7 @@ class HomeActivity : NoOptionsActivity() {
override fun initView() {
initTopbarHelper()
HeartbeatService.actionStart(this@HomeActivity)
setForCamera()
mBinding.apply {
itemSetting.setOnClickListener {
startActivity(Intent(this@HomeActivity, SettingActivity::class.java))
@ -50,6 +54,14 @@ class HomeActivity : NoOptionsActivity() {
}
}
private fun setForCamera() {
// Render the RTSP stream on the GLSurfaceView using an OpenGL ES 3.0 context
mBinding.surface.setEGLContextClientVersion(3)
val mRender = RtspSurfaceRender(mBinding.surface)
mRender.setRtspUrl(CAMERA_URL)
mBinding.surface.setRenderer(mRender)
// Redraw only when the renderer requests it (i.e. when a new frame arrives)
mBinding.surface.renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
}
private fun initTopbarHelper() {
BatteryHelper.init(this@HomeActivity)
WifiHelper.init(this@HomeActivity)

@ -3,8 +3,8 @@
android:layout_width="match_parent"
android:layout_height="match_parent">
<View
android:id="@+id/video_view"
<android.opengl.GLSurfaceView
android:id="@+id/surface"
android:layout_width="match_parent"
android:layout_height="match_parent" />

@ -0,0 +1 @@
/build

@ -0,0 +1,16 @@
apply from: "${rootProject.rootDir}/buildCommon/commonLibConfig.gradle"
project.ext.setLibDefaultConfig project
android {
lintOptions {
abortOnError false
}
}
dependencies {
implementation fileTree(include: ['*.jar'], dir: 'libs')
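// NOTE: the libvlc-3.0.0 AAR below is assumed to be resolved from a flatDir repository (e.g. this module's libs/ directory); that repository setup is not part of this diff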
implementation(name: 'libvlc-3.0.0', ext: 'aar')
implementation project(path: ':library-common')
}

Binary file not shown.

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.yinuo.library.vlc">
<uses-permission android:name="android.permission.RECORD_AUDIO" />
</manifest>

@ -0,0 +1,133 @@
package com.yinuo.library.vlc;
import android.content.Context;
import android.opengl.GLES20;
import android.opengl.Matrix;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import static android.opengl.GLES20.glGetAttribLocation;
import static android.opengl.GLES20.glGetUniformLocation;
import static android.opengl.GLES20.glUniformMatrix4fv;
import static android.opengl.Matrix.scaleM;
import static android.opengl.Matrix.setIdentityM;
/**
* Created by liwentian on 17/6/22.
*/
public class RGBProgram extends ShaderProgram {
protected final int mUniformSTextureLocation;
protected final int mUniformMatrixLocation;
static final float CUBE[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
public static final float TEXTURE_NO_ROTATION[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
// Attribute locations
private final int aPositionLocation;
private final int aTextureCoordinatesLocation;
private final FloatBuffer mGLCubeBuffer;
private final FloatBuffer mGLTextureBuffer;
private int mTextureId;
private ByteBuffer mBuffer;
private float[] mMatrix = new float[16];
public RGBProgram(Context context, int width, int height) {
super(context, R.raw.rgb_vertex, R.raw.rgb_fragment, width, height);
mUniformSTextureLocation = glGetUniformLocation(program, "s_texture");
mUniformMatrixLocation = glGetUniformLocation(program, "u_Matrix");
aPositionLocation = glGetAttribLocation(program, "a_Position");
aTextureCoordinatesLocation = glGetAttribLocation(program, "a_TextureCoordinates");
mGLCubeBuffer = ByteBuffer.allocateDirect(CUBE.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_NO_ROTATION.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mBuffer = ByteBuffer.allocateDirect(width * height * 4)
.order(ByteOrder.nativeOrder());
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
mTextureId = textures[0];
mGLCubeBuffer.clear();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer.clear();
mGLTextureBuffer.put(TEXTURE_NO_ROTATION).position(0);
}
public void setUniforms(byte[] data) {
setUniforms(data, 0);
}
public void setUniforms(byte[] data, int rotateDegrees) {
setUniforms(data, 1f, 1f, rotateDegrees);
}
public void setUniforms(byte[] data, float scaleX, float scaleY, int rotateDegrees) {
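// data must contain one full RGBA frame: width * height * 4 bytes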
mBuffer.position(0);
mBuffer.put(data, 0, width * height * 4);
mBuffer.position(0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height,
0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mBuffer);
GLES20.glUniform1i(mUniformSTextureLocation, 0);
mGLCubeBuffer.position(0);
GLES20.glVertexAttribPointer(aPositionLocation, 2, GLES20.GL_FLOAT, false, 0, mGLCubeBuffer);
GLES20.glEnableVertexAttribArray(aPositionLocation);
mGLTextureBuffer.position(0);
GLES20.glVertexAttribPointer(aTextureCoordinatesLocation, 2, GLES20.GL_FLOAT, false, 0, mGLTextureBuffer);
GLES20.glEnableVertexAttribArray(aTextureCoordinatesLocation);
setIdentityM(mMatrix, 0);
scaleM(mMatrix, 0, scaleX, scaleY, 1);
Matrix.rotateM(mMatrix, 0, rotateDegrees, 0.0f, 0.0f, 1.0f);
glUniformMatrix4fv(mUniformMatrixLocation, 1, false, mMatrix, 0);
}
public void draw() {
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glDisableVertexAttribArray(aPositionLocation);
GLES20.glDisableVertexAttribArray(aTextureCoordinatesLocation);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glUseProgram(0);
}
}

@ -0,0 +1,45 @@
package com.yinuo.library.vlc;
import android.content.Context;
import android.content.res.Resources;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
/**
* Created by liwentian on 17/6/22.
*/
public class ResourceUtils {
public static String readText(Context context,
int resourceId) {
StringBuilder body = new StringBuilder();
try {
InputStream inputStream = context.getResources()
.openRawResource(resourceId);
InputStreamReader inputStreamReader = new InputStreamReader(
inputStream);
BufferedReader bufferedReader = new BufferedReader(
inputStreamReader);
String nextLine;
while ((nextLine = bufferedReader.readLine()) != null) {
body.append(nextLine);
body.append('\n');
}
} catch (IOException e) {
throw new RuntimeException(
"Could not open resource: " + resourceId, e);
} catch (Resources.NotFoundException nfe) {
throw new RuntimeException("Resource not found: "
+ resourceId, nfe);
}
return body.toString();
}
}

@ -0,0 +1,95 @@
package com.yinuo.library.vlc;
import android.net.Uri;
import com.common.commonlib.CommonApplication;
import org.videolan.libvlc.LibVLC;
import org.videolan.libvlc.Media;
import org.videolan.libvlc.MediaPlayCallback;
import org.videolan.libvlc.MediaPlayer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Objects;
/**
* Created by liwentian on 2017/10/12.
*/
public class RtspHelper {
private MediaPlayer mMediaPlayer;
private LibVLC mVlc;
private static RtspHelper sInstance = new RtspHelper();
private ByteBuffer mByteBuffer;
public static RtspHelper getInstance() {
return sInstance;
}
public interface RtspCallback {
void onPreviewFrame(ByteBuffer buffer, int width, int height);
}
private RtspHelper() {
}
public void createPlayer(String url, final int width, final int height, final RtspCallback callback) {
releasePlayer();
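// One decoded RGBA frame: 4 bytes per pixel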
mByteBuffer = ByteBuffer.allocateDirect(width * height * 4)
.order(ByteOrder.nativeOrder());
try {
ArrayList<String> options = new ArrayList<String>();
options.add("--aout=opensles");
options.add("--audio-time-stretch"); // time stretching
options.add("-vvv"); // verbosity
options.add("--file-caching=2000");
options.add("--rtsp-frame-buffer-size=5");
options.add("--rtsp-tcp");
mVlc = new LibVLC(Objects.requireNonNull(CommonApplication.Companion.getContext()), options);
// Create media player
mMediaPlayer = new MediaPlayer(mVlc);
mMediaPlayer.setVideoFormat("RGBA", width, height, width * 4);
mMediaPlayer.setVideoCallback(mByteBuffer, new MediaPlayCallback() {
@Override
public void onDisplay(final ByteBuffer byteBuffer) {
callback.onPreviewFrame(byteBuffer, width, height);
}
});
Media m = new Media(mVlc, Uri.parse(url));
int cache = 150;
m.addOption(":network-caching=" + cache);
m.addOption(":live-cacheing=" + cache);
m.addOption(":sout-mux-caching=" + cache);
m.addOption(":clock-jitter=0");
m.addOption(":clock-synchro=0");
m.addOption(":codec=mediacodec,iomx,all");
mMediaPlayer.setMedia(m);
mMediaPlayer.play();
} catch (Throwable e) {
e.printStackTrace();
}
}
public void releasePlayer() {
if (mVlc == null) {
return;
}
mMediaPlayer.setVideoCallback(null, null);
mMediaPlayer.stop();
// Also release the MediaPlayer itself so the native player is not leaked
mMediaPlayer.release();
mMediaPlayer = null;
mVlc.release();
mVlc = null;
}
}

@ -0,0 +1,117 @@
package com.yinuo.library.vlc;
import static android.opengl.GLES20.GL_COLOR_BUFFER_BIT;
import static android.opengl.GLES20.GL_DEPTH_BUFFER_BIT;
import static android.opengl.GLES20.glClear;
import static android.opengl.GLES20.glClearColor;
import android.opengl.GLSurfaceView;
import com.yinuo.library.vlc.encoder.BaseMovieEncoder;
import com.yinuo.library.vlc.encoder.MovieEncoder1;
import com.yinuo.library.vlc.utils.LogUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
* Created by liwentian on 2017/10/12.
*/
public class RtspSurfaceRender implements GLSurfaceView.Renderer, RtspHelper.RtspCallback {
private ByteBuffer mBuffer;
private GLSurfaceView mGLSurfaceView;
private RGBProgram mProgram;
private String mRtspUrl;
private BaseMovieEncoder mVideoEncoder;
public RtspSurfaceRender(GLSurfaceView glSurfaceView) {
mGLSurfaceView = glSurfaceView;
}
public void setRtspUrl(String url) {
mRtspUrl = url;
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
public void startRecording() {
mGLSurfaceView.queueEvent(new Runnable() {
@Override
public void run() {
if (!mVideoEncoder.isRecording()) {
// File output = CameraHelper.getOutputMediaFile(CameraHelper.MEDIA_TYPE_VIDEO, "");
// LogUtils.v(String.format("startRecording: %s", output));
// mVideoEncoder.startRecording(new BaseMovieEncoder.EncoderConfig(output, EGL14.eglGetCurrentContext()));
}
}
});
}
public void stopRecording() {
mGLSurfaceView.queueEvent(new Runnable() {
@Override
public void run() {
if (mVideoEncoder.isRecording()) {
mVideoEncoder.stopRecording();
}
}
});
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
LogUtils.v(String.format("onSurfaceChanged: width = %d, height = %d", width, height));
mProgram = new RGBProgram(mGLSurfaceView.getContext(), width, height);
mBuffer = ByteBuffer.allocateDirect(width * height * 4).order(ByteOrder.nativeOrder());
mVideoEncoder = new MovieEncoder1(mGLSurfaceView.getContext(), width, height);
RtspHelper.getInstance().createPlayer(mRtspUrl, width, height, this);
}
public void onSurfaceDestoryed() {
RtspHelper.getInstance().releasePlayer();
}
@Override
public void onDrawFrame(GL10 gl) {
// Set the clear color before clearing so it applies to this frame
glClearColor(1f, 1f, 1f, 1f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
mProgram.useProgram();
synchronized (mBuffer) {
mProgram.setUniforms(mBuffer.array(), 0);
}
mProgram.draw();
}
@Override
public void onPreviewFrame(final ByteBuffer buffer, int width, int height) {
synchronized (mBuffer) {
mBuffer.rewind();
buffer.rewind();
mBuffer.put(buffer);
}
mGLSurfaceView.post(new Runnable() {
@Override
public void run() {
mVideoEncoder.frameAvailable(buffer.array(), System.nanoTime());
}
});
mGLSurfaceView.requestRender();
}
}

@ -0,0 +1,168 @@
package com.yinuo.library.vlc;
import android.util.Log;
import static android.opengl.GLES20.GL_COMPILE_STATUS;
import static android.opengl.GLES20.GL_FRAGMENT_SHADER;
import static android.opengl.GLES20.GL_LINK_STATUS;
import static android.opengl.GLES20.GL_VALIDATE_STATUS;
import static android.opengl.GLES20.GL_VERTEX_SHADER;
import static android.opengl.GLES20.glAttachShader;
import static android.opengl.GLES20.glCompileShader;
import static android.opengl.GLES20.glCreateProgram;
import static android.opengl.GLES20.glCreateShader;
import static android.opengl.GLES20.glDeleteProgram;
import static android.opengl.GLES20.glDeleteShader;
import static android.opengl.GLES20.glGetProgramiv;
import static android.opengl.GLES20.glGetShaderiv;
import static android.opengl.GLES20.glLinkProgram;
import static android.opengl.GLES20.glShaderSource;
import static android.opengl.GLES20.glValidateProgram;
/**
* Created by liwentian on 17/6/22.
*/
public class ShaderHelper {
private static final String TAG = "ShaderHelper";
/**
* Loads and compiles a vertex shader, returning the OpenGL object ID.
*/
public static int compileVertexShader(String shaderCode) {
return compileShader(GL_VERTEX_SHADER, shaderCode);
}
/**
* Loads and compiles a fragment shader, returning the OpenGL object ID.
*/
public static int compileFragmentShader(String shaderCode) {
return compileShader(GL_FRAGMENT_SHADER, shaderCode);
}
/**
* Compiles a shader, returning the OpenGL object ID.
*/
private static int compileShader(int type, String shaderCode) {
// Create a new shader object.
final int shaderObjectId = glCreateShader(type);
if (shaderObjectId == 0) {
Log.w(TAG, "Could not create new shader.");
return 0;
}
// Pass in the shader source.
glShaderSource(shaderObjectId, shaderCode);
// Compile the shader.
glCompileShader(shaderObjectId);
// Get the compilation status.
final int[] compileStatus = new int[1];
glGetShaderiv(shaderObjectId, GL_COMPILE_STATUS,
compileStatus, 0);
// Print the shader info log to the Android log output.
// Log.v(TAG, "Results of compiling source:" + "\n" + shaderCode
// + "\n:" + glGetShaderInfoLog(shaderObjectId));
// Verify the compile status.
if (compileStatus[0] == 0) {
// If it failed, delete the shader object.
glDeleteShader(shaderObjectId);
Log.w(TAG, "Compilation of shader failed.");
return 0;
}
// Return the shader object ID.
return shaderObjectId;
}
/**
* Links a vertex shader and a fragment shader together into an OpenGL
* program. Returns the OpenGL program object ID, or 0 if linking failed.
*/
public static int linkProgram(int vertexShaderId, int fragmentShaderId) {
// Create a new program object.
final int programObjectId = glCreateProgram();
if (programObjectId == 0) {
Log.w(TAG, "Could not create new program");
return 0;
}
// Attach the vertex shader to the program.
glAttachShader(programObjectId, vertexShaderId);
// Attach the fragment shader to the program.
glAttachShader(programObjectId, fragmentShaderId);
// Link the two shaders together into a program.
glLinkProgram(programObjectId);
// Get the link status.
final int[] linkStatus = new int[1];
glGetProgramiv(programObjectId, GL_LINK_STATUS,
linkStatus, 0);
// Print the program info log to the Android log output.
// Log.v(
// TAG,
// "Results of linking program:\n"
// + glGetProgramInfoLog(programObjectId));
// Verify the link status.
if (linkStatus[0] == 0) {
// If it failed, delete the program object.
glDeleteProgram(programObjectId);
Log.w(TAG, "Linking of program failed.");
return 0;
}
// Return the program object ID.
return programObjectId;
}
/**
* Validates an OpenGL program. Should only be called when developing the
* application.
*/
public static boolean validateProgram(int programObjectId) {
glValidateProgram(programObjectId);
final int[] validateStatus = new int[1];
glGetProgramiv(programObjectId, GL_VALIDATE_STATUS,
validateStatus, 0);
// Log.v(TAG, "Results of validating program: " + validateStatus[0]
// + "\nLog:" + glGetProgramInfoLog(programObjectId));
return validateStatus[0] != 0;
}
/**
* Helper function that compiles the shaders, links and validates the
* program, returning the program ID.
*/
public static int buildProgram(String vertexShaderSource,
String fragmentShaderSource) {
int program;
// Compile the shaders.
int vertexShader = compileVertexShader(vertexShaderSource);
int fragmentShader = compileFragmentShader(fragmentShaderSource);
// Link them into a shader program.
program = linkProgram(vertexShader, fragmentShader);
validateProgram(program);
return program;
}
}

@ -0,0 +1,32 @@
package com.yinuo.library.vlc;
import android.content.Context;
import static android.opengl.GLES20.glUseProgram;
/**
* Created by liwentian on 17/6/22.
*/
public class ShaderProgram {
public final String TAG = getClass().getSimpleName();
protected final int program;
protected final Context context;
protected int width, height;
protected ShaderProgram(Context context, int vertexId, int fragId, int width, int height) {
this.context = context;
this.width = width;
this.height = height;
program = ShaderHelper.buildProgram(ResourceUtils.readText(context, vertexId),
ResourceUtils.readText(context, fragId));
}
public void useProgram() {
glUseProgram(program);
}
}

@ -0,0 +1,35 @@
package com.yinuo.library.vlc;
import android.os.AsyncTask;
/**
* Created by liwentian on 2017/10/12.
*/
public abstract class Task extends AsyncTask<Void, Void, Void> {
@Override
protected void onPreExecute() {
super.onPreExecute();
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
}
@Override
protected void onProgressUpdate(Void... values) {
super.onProgressUpdate(values);
}
@Override
protected void onCancelled(Void aVoid) {
super.onCancelled(aVoid);
}
@Override
protected void onCancelled() {
super.onCancelled();
}
}

@ -0,0 +1,42 @@
package com.yinuo.library.vlc;
import android.os.AsyncTask;
import java.util.concurrent.Callable;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
/**
* Created by liwentian on 2017/10/12.
*/
public class TaskUtils {
public static void execute(final Runnable runnable) {
execute(AsyncTask.THREAD_POOL_EXECUTOR, runnable);
}
public static <V> Future<V> submit(final Callable<V> callable) {
return ((ExecutorService) AsyncTask.THREAD_POOL_EXECUTOR).submit(callable);
}
public static void execute(final Executor executor, final Runnable runnable) {
new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
runnable.run();
return null;
}
}.executeOnExecutor(executor);
}
public static void execute(final Task task) {
execute(AsyncTask.THREAD_POOL_EXECUTOR, task);
}
public static void execute(final Executor executor, final Task task) {
task.executeOnExecutor(executor);
}
}

@ -0,0 +1,70 @@
package com.yinuo.library.vlc.encoder;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* Created by liwentian on 17/8/1.
*/
public class AndroidMuxer {
private final int mExpectedNumTracks = 2;
private MediaMuxer mMuxer;
private volatile boolean mStarted;
private volatile int mNumTracks;
private volatile int mNumReleases;
public AndroidMuxer(String outputPath) {
try {
mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException e) {
e.printStackTrace();
}
}
public int addTrack(MediaFormat trackFormat) {
if (mStarted) {
throw new IllegalStateException();
}
synchronized (mMuxer) {
int track = mMuxer.addTrack(trackFormat);
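// Start the muxer only after both expected tracks (video and audio) have been added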
if (++mNumTracks == mExpectedNumTracks) {
mMuxer.start();
mStarted = true;
}
return track;
}
}
public boolean isStarted() {
return mStarted;
}
public void writeSampleData(int trackIndex, ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
synchronized (mMuxer) {
mMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
}
}
public boolean release() {
synchronized (mMuxer) {
if (++mNumReleases == mNumTracks) {
mMuxer.stop();
mMuxer.release();
return true;
}
}
return false;
}
}

@ -0,0 +1,157 @@
package com.yinuo.library.vlc.encoder;
import android.annotation.SuppressLint;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import com.yinuo.library.vlc.TaskUtils;
import com.yinuo.library.vlc.utils.LogUtils;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* Created by liwentian on 17/7/30.
*/
public class AudioEncoderCore extends MediaEncoderCore implements Runnable {
private static final String TAG = "audio_record";
// AAC Low Overhead Audio Transport Multiplex
private static final String MIME_TYPE = "audio/mp4a-latm";
// AAC frame size. Audio encoder input size is a multiple of this
protected static final int SAMPLES_PER_FRAME = 1024;
protected static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
private int mSampleRate = 44100;
private int mChannelCount = 1;
private int mBitRate = 128000;
private int mMaxInputSize = 16384;
private AudioRecord mAudioRecord;
private int mChannelConfig;
public AudioEncoderCore(AndroidMuxer muxer) {
super(muxer);
prepareEncoder();
prepareRecorder();
}
private void prepareEncoder() {
MediaFormat format = new MediaFormat();
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setString(MediaFormat.KEY_MIME, MIME_TYPE);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mSampleRate);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, mChannelCount);
format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, mMaxInputSize);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
try {
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
} catch (IOException e) {
LogUtils.e(e);
}
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mEncoder.start();
LogUtils.v(String.format("%s prepareEncoder, mEncoder = %s", getClass().getSimpleName(), mEncoder));
}
@SuppressLint("MissingPermission")
private void prepareRecorder() {
switch (mChannelCount) {
case 1:
mChannelConfig = AudioFormat.CHANNEL_IN_MONO;
break;
case 2:
mChannelConfig = AudioFormat.CHANNEL_IN_STEREO;
break;
default:
throw new IllegalArgumentException();
}
int minBufferSize = AudioRecord.getMinBufferSize(mSampleRate,
mChannelConfig, AUDIO_FORMAT);
mAudioRecord = new AudioRecord(
MediaRecorder.AudioSource.CAMCORDER, // source
mSampleRate, // sample rate, hz
mChannelConfig, // channels
AUDIO_FORMAT, // audio format
minBufferSize * 4); // buffer size (bytes)
}
@Override
public void start() {
if (!mRecording) {
mRecording = true;
mAudioRecord.startRecording();
TaskUtils.execute(this);
}
}
@Override
public void stop() {
mRecording = false;
}
@Override
protected boolean isSurfaceInput() {
return false;
}
@Override
public void run() {
while (mRecording) {
drainEncoder(false);
drainAudio(false);
}
drainAudio(true);
mAudioRecord.stop();
drainEncoder(true);
release();
}
private void drainAudio(boolean endOfStream) {
// LogUtils.v(String.format("drainAudio %b", endOfStream));
ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
int bufferIndex = mEncoder.dequeueInputBuffer(-1); // wait indefinitely
if (bufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[bufferIndex];
inputBuffer.clear();
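// One AAC input frame: 1024 samples * 2 bytes (16-bit mono PCM)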
int len = mAudioRecord.read(inputBuffer, SAMPLES_PER_FRAME * 2); // read blocking
long ptsUs = System.nanoTime() / 1000;
if (endOfStream) {
mEncoder.queueInputBuffer(bufferIndex, 0, len, ptsUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
mEncoder.queueInputBuffer(bufferIndex, 0, len, ptsUs, 0);
}
}
}
}

@ -0,0 +1,337 @@
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.content.Context;
import android.opengl.EGLContext;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import com.yinuo.library.vlc.utils.LogUtils;
import java.io.File;
import java.lang.ref.WeakReference;
/**
* Encode a movie from frames rendered from an external texture image.
* <p>
* The object wraps an encoder running on a dedicated thread. The various control messages
* may be sent from arbitrary threads (typically the app UI thread). The encoder thread
* manages both sides of the encoder (feeding and draining); the only external input is
* the GL texture.
* <p>
* The design is complicated slightly by the need to create an EGL context that shares state
* with a view that gets restarted if (say) the device orientation changes. When the view
* in question is a GLSurfaceView, we don't have full control over the EGL context creation
* on that side, so we have to bend a bit backwards here.
* <p>
* To use:
* <ul>
* <li>create a concrete BaseMovieEncoder (e.g. MovieEncoder1)
* <li>create an EncoderConfig
* <li>call BaseMovieEncoder#startRecording() with the config
* <li>for each decoded frame, call BaseMovieEncoder#frameAvailable() with the frame data
* and a timestamp.
* </ul>
* <p>
* TODO: tweak the API (esp. textureId) so it's less awkward for simple use cases.
*/
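// Usage sketch (illustrative, not part of this commit; the output location is an assumption):
//
// BaseMovieEncoder encoder = new MovieEncoder1(context, width, height);
// encoder.startRecording(new BaseMovieEncoder.EncoderConfig(
//         new File(context.getExternalFilesDir(null), "demo.mp4"),
//         EGL14.eglGetCurrentContext()));
// encoder.frameAvailable(rgbaFrameBytes, System.nanoTime()); // once per decoded frame
// encoder.stopRecording();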
public abstract class BaseMovieEncoder implements Runnable {
static final int MSG_START_RECORDING = 0;
static final int MSG_STOP_RECORDING = 1;
static final int MSG_FRAME_AVAILABLE = 2;
static final int MSG_QUIT = 4;
// ----- accessed exclusively by encoder thread -----
private WindowSurface mInputWindowSurface;
private EglCore mEglCore;
private VideoEncoderCore mVideoEncoder;
private AudioEncoderCore mAudioEncoder;
// ----- accessed by multiple threads -----
protected volatile EncoderHandler mHandler;
private Object mReadyFence = new Object(); // guards ready/running
private volatile boolean mReady;
private volatile boolean mRunning;
protected Context mContext;
protected int mWidth, mHeight;
public BaseMovieEncoder(Context context, int width, int height) {
mContext = context;
mWidth = width;
mHeight = height;
}
/**
* Encoder configuration.
* <p>
* Object is immutable, which means we can safely pass it between threads without
* explicit synchronization (and don't need to worry about it getting tweaked out from
* under us).
* <p>
* TODO: make frame rate and iframe interval configurable? Maybe use builder pattern
* with reasonable defaults for those and bit rate.
*/
public static class EncoderConfig {
AndroidMuxer mMuxer;
final File mOutputFile;
final EGLContext mEglContext;
public EncoderConfig(File outputFile,
EGLContext sharedEglContext) {
mOutputFile = outputFile;
mEglContext = sharedEglContext;
mMuxer = new AndroidMuxer(outputFile.getPath());
}
@Override
public String toString() {
return "EncoderConfig: " +
" to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
}
}
/**
* Tells the video recorder to start recording. (Call from non-encoder thread.)
* <p>
* Creates a new thread, which will create an encoder using the provided configuration.
* <p>
* Returns after the recorder thread has started and is ready to accept Messages. The
* encoder may not yet be fully configured.
*/
public void startRecording(EncoderConfig config) {
synchronized (mReadyFence) {
if (mRunning) {
return;
}
mRunning = true;
new Thread(this, "TextureMovieEncoder").start();
while (!mReady) {
try {
mReadyFence.wait();
} catch (InterruptedException ie) {
// ignore
}
}
}
LogUtils.v(String.format("startRecording called"));
mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
}
/**
* Tells the video recorder to stop recording. (Call from non-encoder thread.)
* <p>
* Returns immediately; the encoder/muxer may not yet be finished creating the movie.
* <p>
* TODO: have the encoder thread invoke a callback on the UI thread just before it shuts down
* so we can provide reasonable status UI (and let the caller know that movie encoding
* has completed).
*/
public void stopRecording() {
mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
// We don't know when these will actually finish (or even start). We don't want to
// delay the UI thread though, so we return immediately.
}
/**
* Returns true if recording has been started.
*/
public boolean isRecording() {
synchronized (mReadyFence) {
return mRunning;
}
}
public abstract void onPrepareEncoder();
public abstract void onFrameAvailable(Object o, long timestamp);
/**
* Tells the video recorder that a new frame is available. (Call from non-encoder thread.)
* <p>
* This function sends a message and returns immediately. This isn't sufficient -- we
* don't want the caller to latch a new frame until we're done with this one -- but we
* can get away with it so long as the input frame rate is reasonable and the encoder
* thread doesn't stall.
* <p>
* TODO: either block here until the texture has been rendered onto the encoder surface,
* or have a separate "block if still busy" method that the caller can execute immediately
* before it calls updateTexImage(). The latter is preferred because we don't want to
* stall the caller while this thread does work.
*/
public void frameAvailable(Object object, long timestamp) {
synchronized (mReadyFence) {
if (!mReady) {
return;
}
}
if (timestamp == 0) {
// Seeing this after device is toggled off/on with power button. The
// first frame back has a zero timestamp.
//
// MPEG4Writer thinks this is cause to abort() in native code, so it's very
// important that we just ignore the frame.
return;
}
onFrameAvailable(object, timestamp);
}
/**
* Encoder thread entry point. Establishes Looper/Handler and waits for messages.
* <p>
*
* @see Thread#run()
*/
@Override
public void run() {
// Establish a Looper for this thread, and define a Handler for it.
Looper.prepare();
synchronized (mReadyFence) {
mHandler = new EncoderHandler(this);
mReady = true;
mReadyFence.notify();
}
Looper.loop();
synchronized (mReadyFence) {
mReady = mRunning = false;
mHandler = null;
}
}
/**
* Handles encoder state change requests. The handler is created on the encoder thread.
*/
static class EncoderHandler extends Handler {
private WeakReference<BaseMovieEncoder> mWeakEncoder;
public EncoderHandler(BaseMovieEncoder encoder) {
mWeakEncoder = new WeakReference<BaseMovieEncoder>(encoder);
}
@Override // runs on encoder thread
public void handleMessage(Message inputMessage) {
int what = inputMessage.what;
Object obj = inputMessage.obj;
BaseMovieEncoder encoder = mWeakEncoder.get();
if (encoder == null) {
return;
}
switch (what) {
case MSG_START_RECORDING:
encoder.handleStartRecording((EncoderConfig) obj);
break;
case MSG_STOP_RECORDING:
encoder.handleStopRecording();
break;
case MSG_FRAME_AVAILABLE:
long timestamp = (((long) inputMessage.arg1) << 32) |
(((long) inputMessage.arg2) & 0xffffffffL);
encoder.handleFrameAvailable(timestamp);
break;
case MSG_QUIT:
Looper.myLooper().quit();
break;
default:
throw new RuntimeException("Unhandled msg what=" + what);
}
}
}
/**
* Starts recording.
*/
private void handleStartRecording(EncoderConfig config) {
LogUtils.v(String.format("handleStartRecording"));
prepareEncoder(config.mMuxer, config.mEglContext, mWidth, mHeight);
onPrepareEncoder();
}
/**
* Handles notification of an available frame.
* <p>
* The texture is rendered onto the encoder's input surface, along with a moving
* box (just because we can).
* <p>
*
* @param timestampNanos The frame's timestamp, from SurfaceTexture.
*/
private void handleFrameAvailable(long timestampNanos) {
mVideoEncoder.start();
mAudioEncoder.start();
onFrameAvailable();
mInputWindowSurface.setPresentationTime(timestampNanos);
mInputWindowSurface.swapBuffers();
}
public abstract void onFrameAvailable();
/**
* Handles a request to stop encoding.
*/
private void handleStopRecording() {
mVideoEncoder.stop();
mAudioEncoder.stop();
releaseEncoder();
}
private void prepareEncoder(AndroidMuxer muxer, EGLContext sharedContext, int width, int height) {
mWidth = width;
mHeight = height;
mVideoEncoder = new VideoEncoderCore(muxer, width, height);
mAudioEncoder = new AudioEncoderCore(muxer);
mEglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
mInputWindowSurface = new WindowSurface(mEglCore, mVideoEncoder.getInputSurface(), true);
mInputWindowSurface.makeCurrent();
}
private void releaseEncoder() {
mVideoEncoder.release();
if (mInputWindowSurface != null) {
mInputWindowSurface.release();
mInputWindowSurface = null;
}
if (mEglCore != null) {
mEglCore.release();
mEglCore = null;
}
}
}

@ -0,0 +1,376 @@
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.util.Log;
import android.view.Surface;
/**
* Core EGL state (display, context, config).
* <p>
* The EGLContext must only be attached to one thread at a time. This class is not thread-safe.
*/
public final class EglCore {
private static final String TAG = GlUtil.TAG;
/**
* Constructor flag: surface must be recordable. This discourages EGL from using a
* pixel format that cannot be converted efficiently to something usable by the video
* encoder.
*/
public static final int FLAG_RECORDABLE = 0x01;
/**
* Constructor flag: ask for GLES3, fall back to GLES2 if not available. Without this
* flag, GLES2 is used.
*/
public static final int FLAG_TRY_GLES3 = 0x02;
// Android-specific extension.
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
private EGLConfig mEGLConfig = null;
private int mGlVersion = -1;
/**
* Prepares EGL display and context.
* <p>
* Equivalent to EglCore(null, 0).
*/
public EglCore() {
this(null, 0);
}
/**
* Prepares EGL display and context.
* <p>
* @param sharedContext The context to share, or null if sharing is not desired.
* @param flags Configuration bit flags, e.g. FLAG_RECORDABLE.
*/
public EglCore(EGLContext sharedContext, int flags) {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("EGL already set up");
}
if (sharedContext == null) {
sharedContext = EGL14.EGL_NO_CONTEXT;
}
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
mEGLDisplay = null;
throw new RuntimeException("unable to initialize EGL14");
}
// Try to get a GLES3 context, if requested.
if ((flags & FLAG_TRY_GLES3) != 0) {
//Log.d(TAG, "Trying GLES 3");
EGLConfig config = getConfig(flags, 3);
if (config != null) {
int[] attrib3_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
EGL14.EGL_NONE
};
EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
attrib3_list, 0);
if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) {
//Log.d(TAG, "Got GLES 3 config");
mEGLConfig = config;
mEGLContext = context;
mGlVersion = 3;
}
}
}
if (mEGLContext == EGL14.EGL_NO_CONTEXT) { // GLES 2 only, or GLES 3 attempt failed
//Log.d(TAG, "Trying GLES 2");
EGLConfig config = getConfig(flags, 2);
if (config == null) {
throw new RuntimeException("Unable to find a suitable EGLConfig");
}
int[] attrib2_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
attrib2_list, 0);
checkEglError("eglCreateContext");
mEGLConfig = config;
mEGLContext = context;
mGlVersion = 2;
}
// Confirm with query.
int[] values = new int[1];
EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION,
values, 0);
Log.d(TAG, "EGLContext created, client version " + values[0]);
}
/**
* Finds a suitable EGLConfig.
*
* @param flags Bit flags from constructor.
* @param version Must be 2 or 3.
*/
private EGLConfig getConfig(int flags, int version) {
int renderableType = EGL14.EGL_OPENGL_ES2_BIT;
if (version >= 3) {
renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
}
// The actual surface is generally RGBA or RGBX, so situationally omitting alpha
// doesn't really help. It can also lead to a huge performance hit on glReadPixels()
// when reading into a GL_RGBA buffer.
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
//EGL14.EGL_DEPTH_SIZE, 16,
//EGL14.EGL_STENCIL_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, renderableType,
EGL14.EGL_NONE, 0, // placeholder for recordable [@-3]
EGL14.EGL_NONE
};
if ((flags & FLAG_RECORDABLE) != 0) {
attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID;
attribList[attribList.length - 2] = 1;
}
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
numConfigs, 0)) {
Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
return null;
}
return configs[0];
}
/**
* Discards all resources held by this class, notably the EGL context. This must be
* called from the thread where the context was created.
* <p>
* On completion, no context will be current.
*/
public void release() {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
// Android is unusual in that it uses a reference-counted EGLDisplay. So for
// every eglInitialize() we need an eglTerminate().
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEGLDisplay);
}
mEGLDisplay = EGL14.EGL_NO_DISPLAY;
mEGLContext = EGL14.EGL_NO_CONTEXT;
mEGLConfig = null;
}
@Override
protected void finalize() throws Throwable {
try {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
// We're limited here -- finalizers don't run on the thread that holds
// the EGL state, so if a surface or context is still current on another
// thread we can't fully release it here. Exceptions thrown from here
// are quietly discarded. Complain in the log file.
Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked");
release();
}
} finally {
super.finalize();
}
}
/**
* Destroys the specified surface. Note the EGLSurface won't actually be destroyed if it's
* still current in a context.
*/
public void releaseSurface(EGLSurface eglSurface) {
EGL14.eglDestroySurface(mEGLDisplay, eglSurface);
}
/**
* Creates an EGL surface associated with a Surface.
* <p>
* If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute.
*/
public EGLSurface createWindowSurface(Object surface) {
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
throw new RuntimeException("invalid surface: " + surface);
}
// Create a window surface, and attach it to the Surface we received.
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
surfaceAttribs, 0);
checkEglError("eglCreateWindowSurface");
if (eglSurface == null) {
throw new RuntimeException("surface was null");
}
return eglSurface;
}
/**
* Creates an EGL surface associated with an offscreen buffer.
*/
public EGLSurface createOffscreenSurface(int width, int height) {
int[] surfaceAttribs = {
EGL14.EGL_WIDTH, width,
EGL14.EGL_HEIGHT, height,
EGL14.EGL_NONE
};
EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig,
surfaceAttribs, 0);
checkEglError("eglCreatePbufferSurface");
if (eglSurface == null) {
throw new RuntimeException("surface was null");
}
return eglSurface;
}
/**
* Makes our EGL context current, using the supplied surface for both "draw" and "read".
*/
public void makeCurrent(EGLSurface eglSurface) {
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
// called makeCurrent() before create?
Log.d(TAG, "NOTE: makeCurrent w/o display");
}
if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Makes our EGL context current, using the supplied "draw" and "read" surfaces.
*/
public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
// called makeCurrent() before create?
Log.d(TAG, "NOTE: makeCurrent w/o display");
}
if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent(draw,read) failed");
}
}
/**
* Makes no context current.
*/
public void makeNothingCurrent() {
if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*
* @return false on failure
*/
public boolean swapBuffers(EGLSurface eglSurface) {
return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
}
/**
* Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
*/
public void setPresentationTime(EGLSurface eglSurface, long nsecs) {
EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);
}
/**
* Returns true if our context and the specified surface are current.
*/
public boolean isCurrent(EGLSurface eglSurface) {
return mEGLContext.equals(EGL14.eglGetCurrentContext()) &&
eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
}
/**
* Performs a simple surface query.
*/
public int querySurface(EGLSurface eglSurface, int what) {
int[] value = new int[1];
EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
return value[0];
}
/**
* Queries a string value.
*/
public String queryString(int what) {
return EGL14.eglQueryString(mEGLDisplay, what);
}
/**
* Returns the GLES version this context is configured for (currently 2 or 3).
*/
public int getGlVersion() {
return mGlVersion;
}
/**
* Writes the current display, context, and surface to the log.
*/
public static void logCurrent(String msg) {
EGLDisplay display;
EGLContext context;
EGLSurface surface;
display = EGL14.eglGetCurrentDisplay();
context = EGL14.eglGetCurrentContext();
surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
Log.i(TAG, "Current EGL (" + msg + "): display=" + display + ", context=" + context +
", surface=" + surface);
}
/**
* Checks for EGL errors. Throws an exception if an error has been raised.
*/
private void checkEglError(String msg) {
int error;
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
}
}
public EGLContext getEGLContext() {
return mEGLContext;
}
}

@ -0,0 +1,197 @@
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.graphics.Bitmap;
import android.opengl.EGL14;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import android.util.Log;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Common base class for EGL surfaces.
* <p>
* There can be multiple surfaces associated with a single context.
*/
public class EglSurfaceBase {
protected static final String TAG = GlUtil.TAG;
// EglCore object we're associated with. It may be associated with multiple surfaces.
protected EglCore mEglCore;
private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
private int mWidth = -1;
private int mHeight = -1;
protected EglSurfaceBase(EglCore eglCore) {
mEglCore = eglCore;
}
/**
* Creates a window surface.
* <p>
* @param surface May be a Surface or SurfaceTexture.
*/
public void createWindowSurface(Object surface) {
if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
throw new IllegalStateException("surface already created");
}
mEGLSurface = mEglCore.createWindowSurface(surface);
// Don't cache width/height here, because the size of the underlying surface can change
// out from under us (see e.g. HardwareScalerActivity).
//mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
//mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
}
/**
* Creates an off-screen surface.
*/
public void createOffscreenSurface(int width, int height) {
if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
throw new IllegalStateException("surface already created");
}
mEGLSurface = mEglCore.createOffscreenSurface(width, height);
mWidth = width;
mHeight = height;
}
/**
* Returns the surface's width, in pixels.
* <p>
* If this is called on a window surface, and the underlying surface is in the process
* of changing size, we may not see the new size right away (e.g. in the "surfaceChanged"
* callback). The size should match after the next buffer swap.
*/
public int getWidth() {
if (mWidth < 0) {
return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
} else {
return mWidth;
}
}
/**
* Returns the surface's height, in pixels.
*/
public int getHeight() {
if (mHeight < 0) {
return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
} else {
return mHeight;
}
}
/**
* Release the EGL surface.
*/
public void releaseEglSurface() {
mEglCore.releaseSurface(mEGLSurface);
mEGLSurface = EGL14.EGL_NO_SURFACE;
mWidth = mHeight = -1;
}
/**
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
mEglCore.makeCurrent(mEGLSurface);
}
/**
* Makes our EGL context and surface current for drawing, using the supplied surface
* for reading.
*/
public void makeCurrentReadFrom(EglSurfaceBase readSurface) {
mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface);
}
/**
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*
* @return false on failure
*/
public boolean swapBuffers() {
boolean result = mEglCore.swapBuffers(mEGLSurface);
if (!result) {
Log.d(TAG, "WARNING: swapBuffers() failed");
}
return result;
}
/**
* Sends the presentation time stamp to EGL.
*
* @param nsecs Timestamp, in nanoseconds.
*/
public void setPresentationTime(long nsecs) {
mEglCore.setPresentationTime(mEGLSurface, nsecs);
}
/**
* Saves the EGL surface to a file.
* <p>
* Expects that this object's EGL surface is current.
*/
public void saveFrame(File file) throws IOException {
if (!mEglCore.isCurrent(mEGLSurface)) {
throw new RuntimeException("Expected EGL context/surface is not current");
}
// glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
// data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
// constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
// Bitmap "copy pixels" method wants the same format GL provides.
//
// Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
// here often.
//
// Making this even more interesting is the upside-down nature of GL, which means
// our output will look upside down relative to what appears on screen if the
// typical GL conventions are used.
String filename = file.toString();
int width = getWidth();
int height = getHeight();
ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
buf.order(ByteOrder.LITTLE_ENDIAN);
GLES20.glReadPixels(0, 0, width, height,
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
GlUtil.checkGlError("glReadPixels");
buf.rewind();
BufferedOutputStream bos = null;
try {
bos = new BufferedOutputStream(new FileOutputStream(filename));
Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bmp.copyPixelsFromBuffer(buf);
bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
bmp.recycle();
} finally {
if (bos != null) bos.close();
}
Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
}
}

@ -0,0 +1,195 @@
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.opengl.GLES20;
import android.opengl.GLES30;
import android.opengl.Matrix;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
* Some OpenGL utility functions.
*/
public class GlUtil {
public static final String TAG = "Grafika";
/** Identity matrix for general use. Don't modify or life will get weird. */
public static final float[] IDENTITY_MATRIX;
static {
IDENTITY_MATRIX = new float[16];
Matrix.setIdentityM(IDENTITY_MATRIX, 0);
}
private static final int SIZEOF_FLOAT = 4;
private GlUtil() {} // do not instantiate
/**
* Creates a new program from the supplied vertex and fragment shaders.
*
* @return A handle to the program, or 0 on failure.
*/
public static int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
checkGlError("glCreateProgram");
if (program == 0) {
Log.e(TAG, "Could not create program");
}
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
return program;
}
/**
* Compiles the provided shader source.
*
* @return A handle to the shader, or 0 on failure.
*/
public static int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
checkGlError("glCreateShader type=" + shaderType);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
return shader;
}
/**
* Checks to see if a GLES error has been raised.
*/
public static void checkGlError(String op) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
String msg = op + ": glError 0x" + Integer.toHexString(error);
Log.e(TAG, msg);
throw new RuntimeException(msg);
}
}
/**
* Checks to see if the location we obtained is valid. GLES returns -1 if a label
* could not be found, but does not set the GL error.
* <p>
* Throws a RuntimeException if the location is invalid.
*/
public static void checkLocation(int location, String label) {
if (location < 0) {
throw new RuntimeException("Unable to locate '" + label + "' in program");
}
}
/**
* Creates a texture from raw data.
*
* @param data Image data, in a "direct" ByteBuffer.
* @param width Texture width, in pixels (not bytes).
* @param height Texture height, in pixels.
* @param format Image data format (use constant appropriate for glTexImage2D(), e.g. GL_RGBA).
* @return Handle to texture.
*/
public static int createImageTexture(ByteBuffer data, int width, int height, int format) {
int[] textureHandles = new int[1];
int textureHandle;
GLES20.glGenTextures(1, textureHandles, 0);
textureHandle = textureHandles[0];
GlUtil.checkGlError("glGenTextures");
// Bind the texture handle to the 2D texture target.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);
// Configure min/mag filtering, i.e. what scaling method do we use if what we're rendering
// is smaller or larger than the source image.
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GlUtil.checkGlError("loadImageTexture");
// Load the data from the buffer into the texture handle.
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, format,
width, height, /*border*/ 0, format, GLES20.GL_UNSIGNED_BYTE, data);
GlUtil.checkGlError("loadImageTexture");
return textureHandle;
}
/**
* Allocates a direct float buffer, and populates it with the float array data.
*/
public static FloatBuffer createFloatBuffer(float[] coords) {
// Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT);
bb.order(ByteOrder.nativeOrder());
FloatBuffer fb = bb.asFloatBuffer();
fb.put(coords);
fb.position(0);
return fb;
}
/**
* Writes GL version info to the log.
*/
public static void logVersionInfo() {
Log.i(TAG, "vendor : " + GLES20.glGetString(GLES20.GL_VENDOR));
Log.i(TAG, "renderer: " + GLES20.glGetString(GLES20.GL_RENDERER));
Log.i(TAG, "version : " + GLES20.glGetString(GLES20.GL_VERSION));
        if (false) {    // intentionally disabled; flip to true to also log the GLES 3.x major/minor version
int[] values = new int[1];
GLES30.glGetIntegerv(GLES30.GL_MAJOR_VERSION, values, 0);
int majorVersion = values[0];
GLES30.glGetIntegerv(GLES30.GL_MINOR_VERSION, values, 0);
int minorVersion = values[0];
if (GLES30.glGetError() == GLES30.GL_NO_ERROR) {
Log.i(TAG, "iversion: " + majorVersion + "." + minorVersion);
}
}
}
}
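// A minimal usage sketch for GlUtil, assuming a GL thread with a current EGL
// context; the shader sources and the "a_Position" attribute name below are
// illustrative only, not part of this commit.
class GlUtilSketch {
    private static final String VERTEX_SRC =
            "attribute vec4 a_Position;\n" +
            "void main() { gl_Position = a_Position; }\n";
    private static final String FRAGMENT_SRC =
            "precision mediump float;\n" +
            "void main() { gl_FragColor = vec4(1.0); }\n";

    static int buildProgram() {
        int program = GlUtil.createProgram(VERTEX_SRC, FRAGMENT_SRC);
        if (program == 0) {
            throw new RuntimeException("createProgram failed, see log for details");
        }
        int aPosition = android.opengl.GLES20.glGetAttribLocation(program, "a_Position");
        GlUtil.checkLocation(aPosition, "a_Position");
        // Direct FloatBuffer holding one triangle, ready for glVertexAttribPointer.
        java.nio.FloatBuffer triangle = GlUtil.createFloatBuffer(new float[]{
                -0.5f, -0.5f, 0.5f, -0.5f, 0.0f, 0.5f});
        return program;
    }
}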

@ -0,0 +1,147 @@
package com.yinuo.library.vlc.encoder;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.Log;
import com.yinuo.library.vlc.utils.LogUtils;
import java.nio.ByteBuffer;
/**
* Created by liwentian on 17/8/1.
*/
public abstract class MediaEncoderCore {
protected final String TAG = getClass().getSimpleName();
protected static final boolean VERBOSE = false;
protected AndroidMuxer mMuxer;
protected MediaCodec mEncoder;
protected int mTrackIndex = -1;
protected volatile boolean mRecording;
protected MediaCodec.BufferInfo mBufferInfo;
public MediaEncoderCore(AndroidMuxer muxer) {
LogUtils.v(String.format("%s <init> called", getClass().getSimpleName()));
mMuxer = muxer;
mBufferInfo = new MediaCodec.BufferInfo();
}
public abstract void start();
public abstract void stop();
/**
* Extracts all pending data from the encoder and forwards it to the muxer.
* <p>
* If endOfStream is not set, this returns when there is no more data to drain. If it
* is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
* Calling this with endOfStream set should be done once, right before stopping the muxer.
* <p>
* We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
* not recording audio.
*/
public void drainEncoder(boolean endOfStream) {
// LogUtils.v(String.format("%s drainEncoder: end = %b", getClass().getSimpleName(), endOfStream));
final int TIMEOUT_USEC = 10000;
if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
if (endOfStream && isSurfaceInput()) {
if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
while (true) {
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
} else {
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if (!mMuxer.isStarted()) {
mBufferInfo.size = 0;
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if (VERBOSE) {
Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
mBufferInfo.presentationTimeUs);
}
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
break; // out of while
}
}
}
}
public void release() {
LogUtils.v(String.format("%s.release", getClass().getSimpleName()));
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mMuxer != null) {
mMuxer.release();
}
}
protected abstract boolean isSurfaceInput();
}
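// A rough sketch of the drain lifecycle described in drainEncoder()'s comment,
// assuming the VideoEncoderCore subclass and AndroidMuxer from this commit and a
// single recording thread; the loop shape is illustrative only.
class DrainLoopSketch {
    void record(VideoEncoderCore encoder, java.util.concurrent.atomic.AtomicBoolean recording) {
        while (recording.get()) {
            encoder.drainEncoder(false);   // pull whatever the codec has produced so far
            // ... submit the next frame to the encoder's input surface here ...
        }
        encoder.drainEncoder(true);        // signal EOS and drain until the codec confirms it
        encoder.release();                 // stops the codec and releases the muxer
    }
}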

@ -0,0 +1,60 @@
package com.yinuo.library.vlc.encoder;
import android.content.Context;
import com.yinuo.library.vlc.RGBProgram;
import com.yinuo.library.vlc.utils.LogUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Created by liwentian on 2017/10/31.
*/
public class MovieEncoder1 extends BaseMovieEncoder {
private volatile RGBProgram mRGBProgram;
private volatile ByteBuffer mYUVBuffer;
public MovieEncoder1(Context context, int width, int height) {
super(context, width, height);
}
@Override
public void onPrepareEncoder() {
LogUtils.v(String.format("onPrepareEncoder width = %d, height = %d", mWidth, mHeight));
mRGBProgram = new RGBProgram(mContext, mWidth, mHeight);
mYUVBuffer = ByteBuffer.allocateDirect(mWidth * mHeight * 4)
.order(ByteOrder.nativeOrder());
}
@Override
public void onFrameAvailable(Object object, long timestamp) {
byte[] data = (byte[]) object;
if (mYUVBuffer == null) {
return;
}
// LogUtils.v(String.format("onFrameAvailable: data = %d, buffer = %d", data.length, mYUVBuffer.capacity()));
synchronized (mYUVBuffer) {
mYUVBuffer.position(0);
int len = Math.min(mYUVBuffer.capacity(), data.length);
mYUVBuffer.put(data, 0, len);
}
        // Split the 64-bit timestamp across the two int message arguments so it
        // survives the handler handoff intact.
        mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
                (int) (timestamp >> 32), (int) timestamp));
}
@Override
public void onFrameAvailable() {
mRGBProgram.useProgram();
synchronized (mYUVBuffer) {
mRGBProgram.setUniforms(mYUVBuffer.array());
}
mRGBProgram.draw();
}
}
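// A rough sketch of feeding MovieEncoder1, assuming the encoder has already been
// started via its BaseMovieEncoder lifecycle (not shown in this hunk) and that each
// frame is a full-size buffer matching the width * height * 4 allocation above.
class MovieEncoder1Sketch {
    void pushFrame(MovieEncoder1 encoder, byte[] frameBytes) {
        // onFrameAvailable copies the bytes into the shared buffer and forwards a
        // packed timestamp, so nanosecond timestamps work as-is.
        encoder.onFrameAvailable(frameBytes, System.nanoTime());
    }
}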

@ -0,0 +1,114 @@
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;
import java.io.IOException;
/**
* This class wraps up the core components used for surface-input video encoding.
* <p>
* Once created, frames are fed to the input surface. Remember to provide the presentation
* time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
* producer side doesn't get backed up.
* <p>
* This class is not thread-safe, with one exception: it is valid to use the input surface
* on one thread, and drain the output on a different thread.
*/
public class VideoEncoderCore extends MediaEncoderCore {
// TODO: these ought to be configurable as well
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 30; // 30fps
private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
private static final int BIT_RATE = 4000000;
private Surface mInputSurface;
/**
* Configures encoder and muxer state, and prepares the input Surface.
*/
public VideoEncoderCore(AndroidMuxer muxer, int width, int height) {
super(muxer);
prepareEncoder(width, height);
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
// mMuxer = new MediaMuxer(outputFile.toString(),
// MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
private void prepareEncoder(int width, int height) {
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "format: " + format);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
        try {
            mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        } catch (IOException e) {
            // Without an encoder there is nothing to configure below, so fail fast
            // instead of hitting a NullPointerException on configure().
            throw new RuntimeException("Unable to create encoder for " + MIME_TYPE, e);
        }
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mEncoder.createInputSurface();
mEncoder.start();
}
/**
* Returns the encoder's input surface.
*/
public Surface getInputSurface() {
return mInputSurface;
}
@Override
public void start() {
drainEncoder(false);
}
@Override
public void stop() {
drainEncoder(true);
}
@Override
protected boolean isSurfaceInput() {
return true;
}
}
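// A minimal per-frame sketch of the ordering the class comment asks for
// (drainEncoder() before swapBuffers()), assuming the WindowSurface wrapping the
// encoder's input surface exposes the Grafika-style makeCurrent()/swapBuffers()/
// setPresentationTime() methods via EglSurfaceBase in this commit.
class SurfaceEncodeSketch {
    void encodeOneFrame(VideoEncoderCore encoder, WindowSurface encoderSurface,
                        long presentationTimeNs) {
        encoder.drainEncoder(false);                      // keep the producer side from backing up
        encoderSurface.makeCurrent();
        // ... draw the frame with GL here ...
        encoderSurface.setPresentationTime(presentationTimeNs);
        encoderSurface.swapBuffers();                     // hand the frame to the encoder
    }
}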

@ -0,0 +1,90 @@
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.graphics.SurfaceTexture;
import android.view.Surface;
/**
* Recordable EGL window surface.
* <p>
* It's good practice to explicitly release() the surface, preferably from a "finally" block.
*/
public class WindowSurface extends EglSurfaceBase {
private Surface mSurface;
private boolean mReleaseSurface;
/**
* Associates an EGL surface with the native window surface.
* <p>
* Set releaseSurface to true if you want the Surface to be released when release() is
* called. This is convenient, but can interfere with framework classes that expect to
* manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the
* surfaceDestroyed() callback won't fire).
*/
public WindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) {
super(eglCore);
createWindowSurface(surface);
mSurface = surface;
mReleaseSurface = releaseSurface;
}
/**
* Associates an EGL surface with the SurfaceTexture.
*/
public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) {
super(eglCore);
createWindowSurface(surfaceTexture);
}
/**
* Releases any resources associated with the EGL surface (and, if configured to do so,
* with the Surface as well).
* <p>
* Does not require that the surface's EGL context be current.
*/
public void release() {
releaseEglSurface();
if (mSurface != null) {
if (mReleaseSurface) {
mSurface.release();
}
mSurface = null;
}
}
/**
* Recreate the EGLSurface, using the new EglBase. The caller should have already
* freed the old EGLSurface with releaseEglSurface().
* <p>
* This is useful when we want to update the EGLSurface associated with a Surface.
* For example, if we want to share with a different EGLContext, which can only
* be done by tearing down and recreating the context. (That's handled by the caller;
* this just creates a new EGLSurface for the Surface we were handed earlier.)
* <p>
* If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
* context somewhere, the create call will fail with complaints from the Surface
* about already being connected.
*/
public void recreate(EglCore newEglCore) {
if (mSurface == null) {
throw new RuntimeException("not yet implemented for SurfaceTexture");
}
mEglCore = newEglCore; // switch to new context
createWindowSurface(mSurface); // create new surface
}
}
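// A small sketch of the release pattern recommended above, assuming an EglCore from
// this commit and a Surface obtained elsewhere (e.g. an encoder input surface);
// makeCurrent() is assumed to come from EglSurfaceBase.
class WindowSurfaceReleaseSketch {
    void drawOnce(EglCore eglCore, android.view.Surface target) {
        WindowSurface windowSurface = new WindowSurface(eglCore, target, false);
        try {
            windowSurface.makeCurrent();
            // ... issue GL calls and swapBuffers() here ...
        } finally {
            windowSurface.release();   // always release the EGL surface, even if drawing throws
        }
    }
}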

@ -0,0 +1,190 @@
package com.yinuo.library.vlc.encoder;
import android.content.Context;
import android.content.res.Configuration;
import android.opengl.GLES20;
import android.opengl.Matrix;
import com.yinuo.library.vlc.R;
import com.yinuo.library.vlc.ShaderProgram;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import static android.opengl.GLES20.GL_CLAMP_TO_EDGE;
import static android.opengl.GLES20.GL_FLOAT;
import static android.opengl.GLES20.GL_LINEAR;
import static android.opengl.GLES20.GL_LUMINANCE;
import static android.opengl.GLES20.GL_LUMINANCE_ALPHA;
import static android.opengl.GLES20.GL_TEXTURE0;
import static android.opengl.GLES20.GL_TEXTURE1;
import static android.opengl.GLES20.GL_TEXTURE_2D;
import static android.opengl.GLES20.GL_TEXTURE_MAG_FILTER;
import static android.opengl.GLES20.GL_TEXTURE_MIN_FILTER;
import static android.opengl.GLES20.GL_TEXTURE_WRAP_S;
import static android.opengl.GLES20.GL_TEXTURE_WRAP_T;
import static android.opengl.GLES20.GL_TRIANGLE_STRIP;
import static android.opengl.GLES20.GL_UNSIGNED_BYTE;
import static android.opengl.GLES20.glActiveTexture;
import static android.opengl.GLES20.glBindTexture;
import static android.opengl.GLES20.glDrawArrays;
import static android.opengl.GLES20.glEnableVertexAttribArray;
import static android.opengl.GLES20.glGenTextures;
import static android.opengl.GLES20.glGetAttribLocation;
import static android.opengl.GLES20.glGetUniformLocation;
import static android.opengl.GLES20.glTexImage2D;
import static android.opengl.GLES20.glTexParameterf;
import static android.opengl.GLES20.glUniform1i;
import static android.opengl.GLES20.glVertexAttribPointer;
/**
* Created by liwentian on 17/8/16.
*/
/**
 * Uploads and renders NV21 (YUV420SP) preview data from the camera as two textures:
 * a full-resolution luminance (Y) texture and a half-resolution interleaved chroma (UV) texture.
 */
public class YUVProgram extends ShaderProgram {
protected final int mUniformYTextureLocation;
protected final int mUniformUVTextureLocation;
static final float CUBE[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
public static final float TEXTURE_UPSIDE_DOWN[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
public static final float TEXTURE_NO_ROTATION[] = {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
};
// Attribute locations
private final int aPositionLocation;
private final int aTextureCoordinatesLocation;
private final int uMVPMatrixLocation;
private final FloatBuffer mGLCubeBuffer;
private final FloatBuffer mGLTextureBuffer;
    private int mYTextureId, mUVTextureId;
private ByteBuffer mYBuffer, mUVBuffer;
public YUVProgram(Context context, int width, int height) {
super(context, R.raw.yuv_vertex, R.raw.yuv_fragment, width, height);
mUniformYTextureLocation = glGetUniformLocation(program, "y_texture");
mUniformUVTextureLocation = glGetUniformLocation(program, "uv_texture");
uMVPMatrixLocation = glGetUniformLocation(program, "uMVPMatrix");
aPositionLocation = glGetAttribLocation(program, "a_Position");
aTextureCoordinatesLocation = glGetAttribLocation(program, "a_TextureCoordinates");
mGLCubeBuffer = ByteBuffer.allocateDirect(CUBE.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_NO_ROTATION.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mYBuffer = ByteBuffer.allocateDirect(width * height)
.order(ByteOrder.nativeOrder());
mUVBuffer = ByteBuffer.allocateDirect(width * height / 2)
.order(ByteOrder.nativeOrder());
int[] textures = new int[2];
glGenTextures(2, textures, 0);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textures[0]);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        mYTextureId = textures[0];
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, textures[1]);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
mUVTextureId = textures[1];
mGLCubeBuffer.clear();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer.clear();
mGLTextureBuffer.put(TEXTURE_NO_ROTATION).position(0);
}
public void setUpsideDown() {
mGLTextureBuffer.clear();
mGLTextureBuffer.put(TEXTURE_UPSIDE_DOWN).position(0);
}
    public void setUniforms(byte[] data) {
        // NV21/NV12 layout: a full-resolution Y plane followed by an interleaved
        // chroma plane of width * height / 2 bytes.
        mYBuffer.position(0);
        mYBuffer.put(data, 0, width * height);
        mUVBuffer.position(0);
        mUVBuffer.put(data, width * height, width * height / 2);
mYBuffer.position(0);
glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, mYTextureId);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height,
0, GL_LUMINANCE, GL_UNSIGNED_BYTE, mYBuffer);
glUniform1i(mUniformYTextureLocation, 0);
GlUtil.checkGlError("init YTexture");
mUVBuffer.position(0);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, mUVTextureId);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, width / 2, height / 2,
0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, mUVBuffer);
glUniform1i(mUniformUVTextureLocation, 1);
float[] matrix = new float[16];
Matrix.setIdentityM(matrix, 0);
int orientation = context.getResources().getConfiguration().orientation;
int degrees = orientation == Configuration.ORIENTATION_LANDSCAPE ? 0 : -90;
Matrix.rotateM(matrix, 0, degrees, 0.0f, 0.0f, 1.0f);
GLES20.glUniformMatrix4fv(uMVPMatrixLocation, 1, false, matrix, 0);
GlUtil.checkGlError("init UVTexture");
mGLCubeBuffer.position(0);
glVertexAttribPointer(aPositionLocation, 2, GL_FLOAT, false, 0, mGLCubeBuffer);
glEnableVertexAttribArray(aPositionLocation);
mGLTextureBuffer.position(0);
glVertexAttribPointer(aTextureCoordinatesLocation, 2, GL_FLOAT, false, 0, mGLTextureBuffer);
glEnableVertexAttribArray(aTextureCoordinatesLocation);
}
public void draw() {
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
}
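// A minimal sketch of driving YUVProgram with one NV21 preview frame, assuming the
// call runs on the GL thread and that useProgram() is inherited from ShaderProgram,
// as RGBProgram uses it elsewhere in this commit; the frame dimensions must match
// the width/height the program was constructed with.
class YuvDrawSketch {
    void drawFrame(YUVProgram yuvProgram, byte[] nv21Frame) {
        yuvProgram.useProgram();
        yuvProgram.setUniforms(nv21Frame);   // uploads the Y plane and interleaved UV plane
        yuvProgram.draw();
    }
}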

@ -0,0 +1,61 @@
package com.yinuo.library.vlc.utils;
import android.util.Log;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
/**
* Created by liwentian on 17/8/16.
*/
public class LogUtils {
private static final String TAG = "bush";
public static void v(String msg) {
Log.v(TAG, msg);
}
public static void v(String tag, String msg) {
Log.v(tag, msg);
}
public static void e(String msg) {
Log.e(TAG, msg);
}
public static void e(String tag, String msg) {
Log.e(tag, msg);
}
public static void w(String msg) {
Log.w(TAG, msg);
}
public static void w(String tag, String msg) {
Log.w(tag, msg);
}
public static void e(Throwable e) {
String s = getThrowableString(e);
e(s);
}
private static String getThrowableString(Throwable e) {
Writer writer = new StringWriter();
PrintWriter printWriter = new PrintWriter(writer);
while (e != null) {
e.printStackTrace(printWriter);
e = e.getCause();
}
String text = writer.toString();
printWriter.close();
return text;
}
}

@ -0,0 +1,7 @@
precision mediump float;
varying vec4 v_Color;
void main() {
gl_FragColor = v_Color;
}

@ -0,0 +1,11 @@
attribute vec4 a_Position;
attribute vec4 a_Color;
varying vec4 v_Color;
uniform mat4 u_Matrix;
void main() {
v_Color = a_Color;
gl_Position = u_Matrix * a_Position;
}

@ -0,0 +1,15 @@
precision mediump float;
varying vec2 v_TextureCoordinates;
uniform sampler2D s_texture;
void main() {
float r, g, b;
r = texture2D(s_texture, v_TextureCoordinates).r;
g = texture2D(s_texture, v_TextureCoordinates).g;
b = texture2D(s_texture, v_TextureCoordinates).b;
gl_FragColor = vec4(r, g, b, 1.0);
}

@ -0,0 +1,11 @@
attribute vec4 a_Position;
attribute vec2 a_TextureCoordinates;
varying vec2 v_TextureCoordinates;
uniform mat4 u_Matrix;
void main() {
v_TextureCoordinates = a_TextureCoordinates;
gl_Position = u_Matrix * a_Position;
}

@ -0,0 +1,14 @@
precision mediump float;
varying vec2 v_TextureCoordinates;
uniform sampler2D s_texture;
void main() {
gl_FragColor = texture2D(s_texture, v_TextureCoordinates);
// float r = texture2D(s_texture, v_TextureCoordinates).r;
// float g = texture2D(s_texture, v_TextureCoordinates).g;
// float b = texture2D(s_texture, v_TextureCoordinates).b;
// gl_FragColor = vec4(1.0, g, b, 1.0);
}

@ -0,0 +1,9 @@
attribute vec4 a_Position;
attribute vec2 a_TextureCoordinates;
varying vec2 v_TextureCoordinates;
void main() {
v_TextureCoordinates = a_TextureCoordinates;
gl_Position = a_Position;
}

@ -0,0 +1,20 @@
precision mediump float;
varying vec2 v_TextureCoordinates;
uniform sampler2D y_texture;
uniform sampler2D uv_texture;
void main() {
    float r, g, b, y, u, v;
    // Sample luma from the Y texture and chroma from the interleaved UV texture,
    // re-centering U and V around zero.
    y = texture2D(y_texture, v_TextureCoordinates).r;
    u = texture2D(uv_texture, v_TextureCoordinates).a - 0.5;
    v = texture2D(uv_texture, v_TextureCoordinates).r - 0.5;
    // BT.601-style YUV -> RGB conversion.
    r = y + 1.13983 * v;
    g = y - 0.39465 * u - 0.58060 * v;
    b = y + 2.03211 * u;
gl_FragColor = vec4(r, g, b, 1.0);
}

@ -0,0 +1,11 @@
attribute vec4 a_Position;
attribute vec2 a_TextureCoordinates;
uniform mat4 uMVPMatrix;
varying vec2 v_TextureCoordinates;
void main() {
v_TextureCoordinates = a_TextureCoordinates;
gl_Position = uMVPMatrix * a_Position;
}

@ -0,0 +1,3 @@
<resources>
<string name="app_name">library-vlc</string>
</resources>

@ -4,3 +4,4 @@ include ':library-push'
include ':library-ijkplayer'
include ':library-serialPort'
include ':library-common'
include ':library-vlc'
