desc:remove vlc

main
xiaowusky 2 years ago
parent 096556b85b
commit 4c68743447

@@ -15,7 +15,7 @@ android {
     ndk {
         //SOso
-        abiFilters "armeabi-v7a"
+        abiFilters "arm64-v8a"
     }
 }

Binary file not shown.

@@ -41,6 +41,8 @@ class HomeActivity : NoOptionsActivity() {
         ActivityHomeBinding.inflate(layoutInflater)
     }
+    var mClient: EasyPlayerClient? = null
     override val isHome: Boolean
         get() = true
@@ -75,7 +77,7 @@ class HomeActivity : NoOptionsActivity() {
             cameraSwitch.performClick()
         }
         cameraSwitch.setOnCheckedChangeListener { buttonView, isChecked ->
-            // if (isChecked) mRender?.startRecording() else mRender?.stopRecording()
+            if (isChecked) mClient?.startRecord("/sdcard/test.mp4") else mClient?.stopRecord()
         }
         itemSetting.post {
@@ -98,9 +100,7 @@ class HomeActivity : NoOptionsActivity() {
      * Set up the camera
      */
     private fun setForCamera() {
-        lifecycleScope.launch {
-            delay(300)
-            val client = EasyPlayerClient(this@HomeActivity, mBinding.surface, null) {
+        mClient = EasyPlayerClient(this@HomeActivity, mBinding.surface, null) {
             if (!AppData.hasCameraData()) {
                 AppData.setCameraData(true)
                 changeViewStatus()
@@ -108,12 +108,11 @@ class HomeActivity : NoOptionsActivity() {
             }
             watchCamera(DELAY_TIME_CHECK_CAMERA)
         }
-        client.play(CAMERA_URL)
+        mClient?.play(CAMERA_URL)
         // The first start is slow, so allow 10 seconds
         watchCamera(DELAY_TIME_OPEN_CAMERA)
         showLoadingDialog(R.string.connecting_camera)
-        }
     }
     /**
      * Update the view state
@@ -129,7 +128,7 @@ class HomeActivity : NoOptionsActivity() {
         // tipView
         mBinding.tipView.visibility =
-            if (!AppData.hasCameraData() && AppData.hasSensorData()) View.VISIBLE else View.GONE
+            if (AppData.hasSensorData()) View.VISIBLE else View.GONE
         mBinding.tipView.setImageBitmap(TxtOverlay.getOverlayBitmap())
         }
     }
@@ -145,7 +144,7 @@ class HomeActivity : NoOptionsActivity() {
         if (!isLoadingShowing() && !AppData.hasSensorData()) {
             showLoadingDialog(R.string.connecting_camera)
         }
-        // mRender?.reStart()
+        mClient?.play(CAMERA_URL)
         watchCamera(DELAY_TIME_OPEN_CAMERA)
     } else {
         watchCamera(DELAY_TIME_CHECK_CAMERA)
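Taken together, these hunks drop the VLC-backed renderer (mRender) and route preview, restart, and local recording through a single EasyPlayerClient instance (mClient). Below is a minimal Kotlin sketch of the resulting flow, using only calls visible in this diff; the EasyPlayerClient constructor signature and its ready-callback semantics are assumed from the usage above, so treat it as illustrative rather than a drop-in file.

// Post-commit wiring in HomeActivity (sketch).
mClient = EasyPlayerClient(this@HomeActivity, mBinding.surface, null) {
    // Runs once frames arrive: mark the camera alive and keep watching it.
    if (!AppData.hasCameraData()) {
        AppData.setCameraData(true)
        changeViewStatus()
    }
    watchCamera(DELAY_TIME_CHECK_CAMERA)
}
mClient?.play(CAMERA_URL) // start (or restart) the RTSP preview
cameraSwitch.setOnCheckedChangeListener { _, isChecked ->
    // Recording now goes through the same client instead of the old renderer.
    if (isChecked) mClient?.startRecord("/sdcard/test.mp4") else mClient?.stopRecord()
}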

@@ -1 +0,0 @@
/build

@@ -1,17 +0,0 @@
apply from: "${rootProject.rootDir}/buildCommon/commonLibConfig.gradle"
project.ext.setLibDefaultConfig project
android {
lintOptions {
abortOnError false
}
}
dependencies {
implementation fileTree(include: ['*.jar'], dir: 'libs')
implementation(name: 'libvlc-3.0.0', ext: 'aar')
implementation project(path: ':library-common')
implementation project(path: ':library-push')
}

Binary file not shown.

@@ -1,21 +0,0 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.yinuo.library.vlc">
<uses-permission android:name="android.permission.RECORD_AUDIO" />
</manifest>

@@ -1,5 +0,0 @@
package com.yinuo.library.vlc
interface IFrameCallBack {
fun onFrame()
}

@@ -1,97 +0,0 @@
package com.yinuo.library.vlc
import android.net.Uri
import com.common.commonlib.CommonApplication
import com.yinuo.library.vlc.utils.LogUtils
import org.easydarwin.push.EasyPusher
import org.easydarwin.push.InitCallback
import org.easydarwin.push.Pusher
object PushHelper {
private val mPusher: EasyPusher by lazy {
EasyPusher()
}
private val mApplicationContext = CommonApplication.getContext()
private var mIp: String? = null
private var mPort: String? = null
private var mId: String? = null
private var mInitialized = false
var callback = InitCallback { code ->
var msg = ""
when (code) {
EasyPusher.OnInitPusherCallback.CODE.EASY_ACTIVATE_INVALID_KEY -> msg = "无效Key"
EasyPusher.OnInitPusherCallback.CODE.EASY_ACTIVATE_SUCCESS -> msg = "未开始"
EasyPusher.OnInitPusherCallback.CODE.EASY_PUSH_STATE_CONNECTING -> msg = "连接中"
EasyPusher.OnInitPusherCallback.CODE.EASY_PUSH_STATE_CONNECTED -> msg = "连接成功"
EasyPusher.OnInitPusherCallback.CODE.EASY_PUSH_STATE_CONNECT_FAILED -> msg = "连接失败"
EasyPusher.OnInitPusherCallback.CODE.EASY_PUSH_STATE_CONNECT_ABORT -> msg =
"连接异常中断"
EasyPusher.OnInitPusherCallback.CODE.EASY_PUSH_STATE_PUSHING -> msg = "推流中"
EasyPusher.OnInitPusherCallback.CODE.EASY_PUSH_STATE_DISCONNECTED -> msg = "断开连接"
EasyPusher.OnInitPusherCallback.CODE.EASY_ACTIVATE_PLATFORM_ERR -> msg = "平台不匹配"
EasyPusher.OnInitPusherCallback.CODE.EASY_ACTIVATE_COMPANY_ID_LEN_ERR -> msg =
"授权使用商不匹配"
EasyPusher.OnInitPusherCallback.CODE.EASY_ACTIVATE_PROCESS_NAME_LEN_ERR -> msg =
"进程名称长度不匹配"
}
LogUtils.v("PushHelper. InitCallback $msg")
}
fun setPushUrl(url: String) {
val mUri = Uri.parse(url)
mIp = mUri.host
mPort = mUri.port.toString()
mId = mUri.path
if (mId?.startsWith("/")!!){
mId = mId!!.substring(1)
}
}
fun startStream(hevc: Boolean) {
stop()
initHelper(hevc)
}
fun stop() {
mPusher.stop()
mInitialized = false
}
fun pushData(h264: ByteArray, length: Int, timeStamp: Long) {
if (mIp.isNullOrEmpty() || mPort.isNullOrEmpty() || mId.isNullOrEmpty()) {
LogUtils.e("PushHelper error, please setPushUrl before!!")
return
}
if (!mInitialized) {
LogUtils.e("PushHelper error, please init first!!")
return
}
mPusher.push(h264, 0, length, timeStamp, 1)
}
private fun initHelper(hevc: Boolean) {
if (mIp.isNullOrEmpty() || mPort.isNullOrEmpty() || mId.isNullOrEmpty()) {
LogUtils.e("PushHelper error, please setPushUrl first!!")
return
}
mPusher.initPush(mApplicationContext, callback)
mPusher.setMediaInfo(
if (hevc) Pusher.Codec.EASY_SDK_VIDEO_CODEC_H265 else Pusher.Codec.EASY_SDK_VIDEO_CODEC_H264,
24,
Pusher.Codec.EASY_SDK_AUDIO_CODEC_AAC,
1,
8000,
16
)
mPusher.start(mIp, mPort, mId, Pusher.TransType.EASY_RTP_OVER_TCP)
mInitialized = true
}
}

@@ -1,133 +0,0 @@
package com.yinuo.library.vlc;
import android.content.Context;
import android.opengl.GLES20;
import android.opengl.Matrix;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import static android.opengl.GLES20.glGetAttribLocation;
import static android.opengl.GLES20.glGetUniformLocation;
import static android.opengl.GLES20.glUniformMatrix4fv;
import static android.opengl.Matrix.scaleM;
import static android.opengl.Matrix.setIdentityM;
/**
* Created by liwentian on 17/6/22.
*/
public class RGBProgram extends ShaderProgram {
protected final int mUniformSTextureLocation;
protected final int mUniformMatrixLocation;
static final float CUBE[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
public static final float TEXTURE_NO_ROTATION[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
// Attribute locations
private final int aPositionLocation;
private final int aTextureCoordinatesLocation;
private final FloatBuffer mGLCubeBuffer;
private final FloatBuffer mGLTextureBuffer;
private int mTextureId;
private ByteBuffer mBuffer;
private float[] mMatrix = new float[16];
public RGBProgram(Context context, int width, int height) {
super(context, R.raw.rgb_vertex, R.raw.rgb_fragment, width, height);
mUniformSTextureLocation = glGetUniformLocation(program, "s_texture");
mUniformMatrixLocation = glGetUniformLocation(program, "u_Matrix");
aPositionLocation = glGetAttribLocation(program, "a_Position");
aTextureCoordinatesLocation = glGetAttribLocation(program, "a_TextureCoordinates");
mGLCubeBuffer = ByteBuffer.allocateDirect(CUBE.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_NO_ROTATION.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mBuffer = ByteBuffer.allocateDirect(width * height * 4)
.order(ByteOrder.nativeOrder());
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
mTextureId = textures[0];
mGLCubeBuffer.clear();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer.clear();
mGLTextureBuffer.put(TEXTURE_NO_ROTATION).position(0);
}
public void setUniforms(byte[] data) {
setUniforms(data, 0);
}
public void setUniforms(byte[] data, int rotateDegrees) {
setUniforms(data, 1f, 1f, rotateDegrees);
}
public void setUniforms(byte[] data, float scaleX, float scaleY, int rotateDegrees) {
mBuffer.position(0);
mBuffer.put(data, 0, width * height * 4);
mBuffer.position(0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height,
0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mBuffer);
GLES20.glUniform1i(mUniformSTextureLocation, 0);
mGLCubeBuffer.position(0);
GLES20.glVertexAttribPointer(aPositionLocation, 2, GLES20.GL_FLOAT, false, 0, mGLCubeBuffer);
GLES20.glEnableVertexAttribArray(aPositionLocation);
mGLTextureBuffer.position(0);
GLES20.glVertexAttribPointer(aTextureCoordinatesLocation, 2, GLES20.GL_FLOAT, false, 0, mGLTextureBuffer);
GLES20.glEnableVertexAttribArray(aTextureCoordinatesLocation);
setIdentityM(mMatrix, 0);
scaleM(mMatrix, 0, scaleX, scaleY, 1);
Matrix.rotateM(mMatrix, 0, rotateDegrees, 0.0f, 0.0f, 1.0f);
glUniformMatrix4fv(mUniformMatrixLocation, 1, false, mMatrix, 0);
}
public void draw() {
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glDisableVertexAttribArray(aPositionLocation);
GLES20.glDisableVertexAttribArray(aTextureCoordinatesLocation);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glUseProgram(0);
}
}

@@ -1,45 +0,0 @@
package com.yinuo.library.vlc;
import android.content.Context;
import android.content.res.Resources;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
/**
* Created by liwentian on 17/6/22.
*/
public class ResourceUtils {
public static String readText(Context context,
int resourceId) {
StringBuilder body = new StringBuilder();
try {
InputStream inputStream = context.getResources()
.openRawResource(resourceId);
InputStreamReader inputStreamReader = new InputStreamReader(
inputStream);
BufferedReader bufferedReader = new BufferedReader(
inputStreamReader);
String nextLine;
while ((nextLine = bufferedReader.readLine()) != null) {
body.append(nextLine);
body.append('\n');
}
} catch (IOException e) {
throw new RuntimeException(
"Could not open resource: " + resourceId, e);
} catch (Resources.NotFoundException nfe) {
throw new RuntimeException("Resource not found: "
+ resourceId, nfe);
}
return body.toString();
}
}

@@ -1,95 +0,0 @@
package com.yinuo.library.vlc;
import android.net.Uri;
import com.common.commonlib.CommonApplication;
import org.videolan.libvlc.LibVLC;
import org.videolan.libvlc.Media;
import org.videolan.libvlc.MediaPlayCallback;
import org.videolan.libvlc.MediaPlayer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Objects;
/**
* Created by liwentian on 2017/10/12.
*/
public class RtspHelper {
private MediaPlayer mMediaPlayer;
private LibVLC mVlc;
private static RtspHelper sInstance = new RtspHelper();
private ByteBuffer mByteBuffer;
public static RtspHelper getInstance() {
return sInstance;
}
public interface RtspCallback {
void onPreviewFrame(ByteBuffer buffer, int width, int height);
}
private RtspHelper() {
}
public void createPlayer(String url, final int width, final int height, final RtspCallback callback) {
releasePlayer();
mByteBuffer = ByteBuffer.allocateDirect(width * height * 4)
.order(ByteOrder.nativeOrder());
try {
ArrayList<String> options = new ArrayList<String>();
options.add("--aout=opensles");
options.add("--audio-time-stretch"); // time stretching
options.add("-vvv"); // verbosity
options.add("--file-caching=2000");
options.add("--rtsp-frame-buffer-size=163840");
options.add("--rtsp-tcp");
mVlc = new LibVLC(Objects.requireNonNull(CommonApplication.Companion.getContext()), options);
// Create media player
mMediaPlayer = new MediaPlayer(mVlc);
mMediaPlayer.setVideoFormat("RGBA", width, height, width * 4);
mMediaPlayer.setVideoCallback(mByteBuffer, new MediaPlayCallback() {
@Override
public void onDisplay(final ByteBuffer byteBuffer) {
callback.onPreviewFrame(byteBuffer, width, height);
}
});
Media m = new Media(mVlc, Uri.parse(url));
int cache = 100;
m.setHWDecoderEnabled(true, false);
m.addOption(":network-caching=" + cache);
m.addOption(":live-cacheing=" + cache);
m.addOption(":sout-mux-caching=" + cache);
m.addOption(":clock-jitter=0");
m.addOption(":clock-synchro=0");
mMediaPlayer.setMedia(m);
mMediaPlayer.play();
} catch (Throwable e) {
e.printStackTrace();
}
}
public void releasePlayer() {
if (mVlc == null) {
return;
}
mMediaPlayer.setVideoCallback(null, null);
mMediaPlayer.stop();
mVlc.release();
mVlc = null;
}
}

@@ -1,117 +0,0 @@
package com.yinuo.library.vlc;
import static android.opengl.GLES20.GL_COLOR_BUFFER_BIT;
import static android.opengl.GLES20.GL_DEPTH_BUFFER_BIT;
import static android.opengl.GLES20.glClear;
import static android.opengl.GLES20.glClearColor;
import android.opengl.GLSurfaceView;
import com.yinuo.library.vlc.encoder.BaseMovieEncoder;
import com.yinuo.library.vlc.encoder.MovieEncoder1;
import com.yinuo.library.vlc.utils.LogUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
* Created by liwentian on 2017/10/12.
*/
public class RtspSurfaceRender implements GLSurfaceView.Renderer, RtspHelper.RtspCallback {
private ByteBuffer mBuffer;
private GLSurfaceView mGLSurfaceView;
private RGBProgram mProgram;
private String mRtspUrl;
private BaseMovieEncoder mVideoEncoder;
public RtspSurfaceRender(GLSurfaceView glSurfaceView) {
mGLSurfaceView = glSurfaceView;
}
public void setRtspUrl(String url) {
mRtspUrl = url;
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
public void startRecording() {
mGLSurfaceView.queueEvent(new Runnable() {
@Override
public void run() {
if (!mVideoEncoder.isRecording()) {
// File output = CameraHelper.getOutputMediaFile(CameraHelper.MEDIA_TYPE_VIDEO, "");
// LogUtils.v(String.format("startRecording: %s", output));
// mVideoEncoder.startRecording(new BaseMovieEncoder.EncoderConfig(output, EGL14.eglGetCurrentContext()));
}
}
});
}
public void stopRecording() {
mGLSurfaceView.queueEvent(new Runnable() {
@Override
public void run() {
if (mVideoEncoder.isRecording()) {
mVideoEncoder.stopRecording();
}
}
});
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
LogUtils.v(String.format("onSurfaceChanged: width = %d, height = %d", width, height));
mProgram = new RGBProgram(mGLSurfaceView.getContext(), width, height);
mBuffer = ByteBuffer.allocateDirect(width * height * 4).order(ByteOrder.nativeOrder());
mVideoEncoder = new MovieEncoder1(mGLSurfaceView.getContext(), width, height, false);
RtspHelper.getInstance().createPlayer(mRtspUrl, width, height, this);
}
public void onSurfaceDestoryed() {
RtspHelper.getInstance().releasePlayer();
}
@Override
public void onDrawFrame(GL10 gl) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glClearColor(1f, 1f, 1f, 1f);
mProgram.useProgram();
synchronized (mBuffer) {
mProgram.setUniforms(mBuffer.array(), 0);
}
mProgram.draw();
}
@Override
public void onPreviewFrame(final ByteBuffer buffer, int width, int height) {
synchronized (mBuffer) {
mBuffer.rewind();
buffer.rewind();
mBuffer.put(buffer);
}
mGLSurfaceView.post(new Runnable() {
@Override
public void run() {
mVideoEncoder.frameAvailable(buffer.array(), System.nanoTime());
}
});
mGLSurfaceView.requestRender();
}
}

@@ -1,147 +0,0 @@
package com.yinuo.library.vlc;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.opengl.EGL14;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.yinuo.library.vlc.encoder.BaseMovieEncoder;
import com.yinuo.library.vlc.encoder.MovieEncoder1;
import com.yinuo.library.vlc.utils.BitmapUtil;
import com.yinuo.library.vlc.utils.LogUtils;
import java.nio.ByteBuffer;
/**
* Created by liwentian on 2017/10/12.
*/
public class RtspSurfaceRender2 implements RtspHelper.RtspCallback {
private final SurfaceView mSurfaceView;
private String mRtspUrl;
private BaseMovieEncoder mVideoEncoder;
private final Object mBitmapLock = new Object();
private Bitmap mVideoBitmap = null;
private volatile boolean mIsResumed = false;
private IFrameCallBack mFrameCallBack = null;
private int mWidth, mheight;
// Render thread for mSurfaceView
Thread renderThread = new Thread(new Runnable() {
@Override
public void run() {
while (true) {
if (mIsResumed) {
if (mVideoBitmap != null) {
Canvas canvas = mSurfaceView.getHolder().lockCanvas();
if (canvas != null) {
synchronized (mBitmapLock) {
canvas.drawBitmap(mVideoBitmap, null, new Rect(0, 0, canvas.getWidth(), canvas.getHeight()), null);
}
mSurfaceView.getHolder().unlockCanvasAndPost(canvas);
try {
Thread.sleep(25);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
}
}
}
});
public RtspSurfaceRender2(SurfaceView surfaceView) {
mSurfaceView = surfaceView;
SurfaceHolder holder = mSurfaceView.getHolder();
holder.addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
LogUtils.v(String.format("onSurfaceChanged: width = %d, height = %d", width, height));
mWidth = width;
mheight = height;
if (mVideoEncoder == null) {
LogUtils.v("init");
mVideoEncoder = new MovieEncoder1(mSurfaceView.getContext(), width, height, true);
mVideoBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
RtspHelper.getInstance().createPlayer(mRtspUrl, width, height, RtspSurfaceRender2.this);
renderThread.start();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
});
}
public void reStart(){
RtspHelper.getInstance().createPlayer(mRtspUrl, mWidth, mheight, RtspSurfaceRender2.this);
}
public void setRtspUrl(String url) {
mRtspUrl = url;
}
public void startRecording() {
mSurfaceView.post(() -> {
if (!mVideoEncoder.isRecording()) {
mVideoEncoder.startRecording(new BaseMovieEncoder.EncoderConfig(EGL14.eglGetCurrentContext()));
}
});
}
public void stopRecording() {
mSurfaceView.post(() -> {
if (mVideoEncoder.isRecording()) {
mVideoEncoder.stopRecording();
}
});
}
public void onDestoryed() {
stopRecording();
RtspHelper.getInstance().releasePlayer();
}
@Override
public void onPreviewFrame(final ByteBuffer buffer, int width, int height) {
if (mFrameCallBack != null){
mFrameCallBack.onFrame();
}
synchronized (mBitmapLock) {
Bitmap overLayBitmap = TxtOverlay.INSTANCE.getOverlayBitmap();
mVideoBitmap.copyPixelsFromBuffer(buffer.position(0));
if (overLayBitmap != null) {
mVideoBitmap = BitmapUtil.mergeBitmap(mVideoBitmap, overLayBitmap);
buffer.clear();
mVideoBitmap.copyPixelsToBuffer(buffer);
}
mVideoEncoder.frameAvailable(buffer.array(), System.nanoTime());
}
}
public void onResume() {
this.mIsResumed = true;
}
public void onPause() {
this.mIsResumed = false;
}
public void setFrameCallBack(IFrameCallBack mFrameCallBack) {
this.mFrameCallBack = mFrameCallBack;
}
}

@@ -1,168 +0,0 @@
package com.yinuo.library.vlc;
import android.util.Log;
import static android.opengl.GLES20.GL_COMPILE_STATUS;
import static android.opengl.GLES20.GL_FRAGMENT_SHADER;
import static android.opengl.GLES20.GL_LINK_STATUS;
import static android.opengl.GLES20.GL_VALIDATE_STATUS;
import static android.opengl.GLES20.GL_VERTEX_SHADER;
import static android.opengl.GLES20.glAttachShader;
import static android.opengl.GLES20.glCompileShader;
import static android.opengl.GLES20.glCreateProgram;
import static android.opengl.GLES20.glCreateShader;
import static android.opengl.GLES20.glDeleteProgram;
import static android.opengl.GLES20.glDeleteShader;
import static android.opengl.GLES20.glGetProgramiv;
import static android.opengl.GLES20.glGetShaderiv;
import static android.opengl.GLES20.glLinkProgram;
import static android.opengl.GLES20.glShaderSource;
import static android.opengl.GLES20.glValidateProgram;
/**
* Created by liwentian on 17/6/22.
*/
public class ShaderHelper {
private static final String TAG = "ShaderHelper";
/**
* Loads and compiles a vertex shader, returning the OpenGL object ID.
*/
public static int compileVertexShader(String shaderCode) {
return compileShader(GL_VERTEX_SHADER, shaderCode);
}
/**
* Loads and compiles a fragment shader, returning the OpenGL object ID.
*/
public static int compileFragmentShader(String shaderCode) {
return compileShader(GL_FRAGMENT_SHADER, shaderCode);
}
/**
* Compiles a shader, returning the OpenGL object ID.
*/
private static int compileShader(int type, String shaderCode) {
// Create a new shader object.
final int shaderObjectId = glCreateShader(type);
if (shaderObjectId == 0) {
Log.w(TAG, "Could not create new shader.");
return 0;
}
// Pass in the shader source.
glShaderSource(shaderObjectId, shaderCode);
// Compile the shader.
glCompileShader(shaderObjectId);
// Get the compilation status.
final int[] compileStatus = new int[1];
glGetShaderiv(shaderObjectId, GL_COMPILE_STATUS,
compileStatus, 0);
// Print the shader info log to the Android log output.
// Log.v(TAG, "Results of compiling source:" + "\n" + shaderCode
// + "\n:" + glGetShaderInfoLog(shaderObjectId));
// Verify the compile status.
if (compileStatus[0] == 0) {
// If it failed, delete the shader object.
glDeleteShader(shaderObjectId);
Log.w(TAG, "Compilation of shader failed.");
return 0;
}
// Return the shader object ID.
return shaderObjectId;
}
/**
* Links a vertex shader and a fragment shader together into an OpenGL
* program. Returns the OpenGL program object ID, or 0 if linking failed.
*/
public static int linkProgram(int vertexShaderId, int fragmentShaderId) {
// Create a new program object.
final int programObjectId = glCreateProgram();
if (programObjectId == 0) {
Log.w(TAG, "Could not create new program");
return 0;
}
// Attach the vertex shader to the program.
glAttachShader(programObjectId, vertexShaderId);
// Attach the fragment shader to the program.
glAttachShader(programObjectId, fragmentShaderId);
// Link the two shaders together into a program.
glLinkProgram(programObjectId);
// Get the link status.
final int[] linkStatus = new int[1];
glGetProgramiv(programObjectId, GL_LINK_STATUS,
linkStatus, 0);
// Print the program info log to the Android log output.
// Log.v(
// TAG,
// "Results of linking program:\n"
// + glGetProgramInfoLog(programObjectId));
// Verify the link status.
if (linkStatus[0] == 0) {
// If it failed, delete the program object.
glDeleteProgram(programObjectId);
Log.w(TAG, "Linking of program failed.");
return 0;
}
// Return the program object ID.
return programObjectId;
}
/**
* Validates an OpenGL program. Should only be called when developing the
* application.
*/
public static boolean validateProgram(int programObjectId) {
glValidateProgram(programObjectId);
final int[] validateStatus = new int[1];
glGetProgramiv(programObjectId, GL_VALIDATE_STATUS,
validateStatus, 0);
// Log.v(TAG, "Results of validating program: " + validateStatus[0]
// + "\nLog:" + glGetProgramInfoLog(programObjectId));
return validateStatus[0] != 0;
}
/**
* Helper function that compiles the shaders, links and validates the
* program, returning the program ID.
*/
public static int buildProgram(String vertexShaderSource,
String fragmentShaderSource) {
int program;
// Compile the shaders.
int vertexShader = compileVertexShader(vertexShaderSource);
int fragmentShader = compileFragmentShader(fragmentShaderSource);
// Link them into a shader program.
program = linkProgram(vertexShader, fragmentShader);
validateProgram(program);
return program;
}
}

@@ -1,32 +0,0 @@
package com.yinuo.library.vlc;
import android.content.Context;
import static android.opengl.GLES20.glUseProgram;
/**
* Created by liwentian on 17/6/22.
*/
public class ShaderProgram {
public final String TAG = getClass().getSimpleName();
protected final int program;
protected final Context context;
protected int width, height;
protected ShaderProgram(Context context, int vertexId, int fragId, int width, int height) {
this.context = context;
this.width = width;
this.height = height;
program = ShaderHelper.buildProgram(ResourceUtils.readText(context, vertexId),
ResourceUtils.readText(context, fragId));
}
public void useProgram() {
glUseProgram(program);
}
}

@@ -1,35 +0,0 @@
package com.yinuo.library.vlc;
import android.os.AsyncTask;
/**
* Created by liwentian on 2017/10/12.
*/
public abstract class Task extends AsyncTask<Void, Void, Void> {
@Override
protected void onPreExecute() {
super.onPreExecute();
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
}
@Override
protected void onProgressUpdate(Void... values) {
super.onProgressUpdate(values);
}
@Override
protected void onCancelled(Void aVoid) {
super.onCancelled(aVoid);
}
@Override
protected void onCancelled() {
super.onCancelled();
}
}

@@ -1,42 +0,0 @@
package com.yinuo.library.vlc;
import android.os.AsyncTask;
import java.util.concurrent.Callable;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
/**
* Created by liwentian on 2017/10/12.
*/
public class TaskUtils {
public static void execute(final Runnable runnable) {
execute(AsyncTask.THREAD_POOL_EXECUTOR, runnable);
}
public static <V> Future<V> submit(final Callable<V> callable) {
return ((ExecutorService) AsyncTask.THREAD_POOL_EXECUTOR).submit(callable);
}
public static void execute(final Executor executor, final Runnable runnable) {
new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
runnable.run();
return null;
}
}.executeOnExecutor(executor);
}
public static void execute(final Task task) {
execute(AsyncTask.THREAD_POOL_EXECUTOR, task);
}
public static void execute(final Executor executor, final Task task) {
task.executeOnExecutor(executor);
}
}

@@ -1,67 +0,0 @@
package com.yinuo.library.vlc
import android.graphics.Bitmap
import android.graphics.Color
import android.text.TextUtils
import org.easydarwin.util.YUVUtils
import java.text.SimpleDateFormat
/**
* Created by John on 2017/2/23.
*/
object TxtOverlay {
// Listener for tip-text changes
private var mTipChangeListener: (() -> Unit)? = null;
// Tip text shown last time
private var mLastShowTip = ""
// Tip text waiting to be shown
private var mToDoShowTip = ""
// Called externally to set the watermark text to display
fun setShowTip(string: String) {
mToDoShowTip = string
buildOverlayBitmap()
mTipChangeListener?.invoke()
}
// Time of the last update
private var lastTipUpdateTime: Long = 0
// Bitmap generated from the text
private var bmp: Bitmap? = null
// Date format pattern
private val dateFormat = SimpleDateFormat("yy-MM-dd HH:mm:ss")
fun buildOverlayBitmap(): Bitmap? {
if (TextUtils.isEmpty(mToDoShowTip)) {
return null
}
val currentTimeMillis = System.currentTimeMillis()
// Throttle how often the bitmap is rebuilt to protect performance
if (TextUtils.isEmpty(mLastShowTip) || mToDoShowTip != mLastShowTip || currentTimeMillis - lastTipUpdateTime > 1000) {
// Record the update time and the previous text
lastTipUpdateTime = currentTimeMillis
mLastShowTip = mToDoShowTip
// // Reclaim memory
// bmp?.recycle()
// Render the text into a bitmap
bmp = YUVUtils.generateBitmap(
dateFormat.format(lastTipUpdateTime) + "@" + mToDoShowTip, 40, Color.WHITE
)
// Scale/rotate the bitmap
// bmp = YUVUtils.rotateImage(bmp, 0);
}
return bmp
}
fun getOverlayBitmap(): Bitmap? {
return bmp;
}
fun setTipChangeListener(onChange: () -> Unit) {
mTipChangeListener = onChange
}
}

@@ -1,119 +0,0 @@
package com.yinuo.library.vlc.encoder;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.text.TextUtils;
import com.common.commonlib.db.DBUtils;
import com.common.commonlib.db.entity.Video;
import com.yinuo.library.vlc.utils.LogUtils;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* Created by liwentian on 17/8/1.
*/
public class AndroidMuxer {
private final int mExpectedNumTracks = 1;
private MediaFormat mVideoFormat = null;
private MediaMuxer mMuxer;
private volatile boolean mStarted;
private volatile int mNumTracks;
private volatile int mNumReleases;
private long mStartRecordTime = 0L;
private String mCurrentPath;
private long mRecordTime = -1;
private static final long DEFAULT_RECORD_DURATION = 30 * 60 * 1000;
public AndroidMuxer() {
createMediaMuxer();
}
private void createMediaMuxer() {
try {
long timeMillis = System.currentTimeMillis();
File output = CameraHelper.getOutputMediaFile(CameraHelper.MEDIA_TYPE_VIDEO, timeMillis, DEFAULT_RECORD_DURATION);
mCurrentPath = output.getAbsolutePath();
mRecordTime = timeMillis;
LogUtils.v(String.format("startRecording: %s", output));
mMuxer = new MediaMuxer(mCurrentPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mStarted = false;
} catch (IOException e) {
e.printStackTrace();
}
}
private void insertToDB(long timeMillis, String filePath) {
String[] splits = filePath.split("/");
Video cacheVideo = new Video(timeMillis, splits[splits.length - 1], false, filePath);
DBUtils.INSTANCE.insertCacheVideo(cacheVideo);
}
public int addTrack(MediaFormat trackFormat) {
if (mStarted) {
throw new IllegalStateException();
}
synchronized (mMuxer) {
int track = mMuxer.addTrack(trackFormat);
mVideoFormat = trackFormat;
if (++mNumTracks == mExpectedNumTracks) {
mMuxer.start();
mStarted = true;
}
return track;
}
}
public boolean isStarted() {
return mStarted;
}
public void writeSampleData(int trackIndex, ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
synchronized (mMuxer) {
mMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
}
if (mStartRecordTime <= 0L) {
mStartRecordTime = System.currentTimeMillis();
} else {
long recordTime = System.currentTimeMillis() - mStartRecordTime;
if (recordTime > DEFAULT_RECORD_DURATION) {
stopMuxer();
mNumTracks = 0;
mStartRecordTime = 0L;
createMediaMuxer();
addTrack(mVideoFormat);
}
}
}
public boolean release() {
synchronized (mMuxer) {
if (++mNumReleases == mNumTracks) {
stopMuxer();
return true;
}
}
return false;
}
public void stopMuxer() {
mMuxer.stop();
mMuxer.release();
if (mRecordTime > 0 && mRecordTime < System.currentTimeMillis() && !TextUtils.isEmpty(mCurrentPath)) {
insertToDB(mRecordTime, mCurrentPath);
}
}
}

@@ -1,249 +0,0 @@
package com.yinuo.library.vlc.encoder;
import android.annotation.SuppressLint;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.util.Log;
import android.view.Surface;
import com.yinuo.library.vlc.PushHelper;
import com.yinuo.library.vlc.utils.LogUtils;
import org.easydarwin.easypusher.BuildConfig;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* Created by liwentian on 17/7/30.
*/
public class AudioEncoderCore extends MediaEncoderCore implements Runnable {
private static final String TAG = "audio_record";
// AAC Low Overhead Audio Transport Multiplex
private static final String MIME_TYPE = "audio/mp4a-latm";
// AAC frame size. Audio encoder input size is a multiple of this
protected static final int SAMPLES_PER_FRAME = 1024;
protected static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
private int mSampleRate = 8000;
private int mChannelCount = 1;
private int mBitRate = 16000;
private int mMaxInputSize = 1920;
private AudioRecord mAudioRecord;
private int mChannelConfig;
public AudioEncoderCore(AndroidMuxer muxer) {
super(muxer);
prepareEncoder();
prepareRecorder();
}
private void prepareEncoder() {
MediaFormat format = new MediaFormat();
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setString(MediaFormat.KEY_MIME, MIME_TYPE);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mSampleRate);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, mChannelCount);
format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, mMaxInputSize);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
try {
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
} catch (IOException e) {
LogUtils.e(e);
}
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mEncoder.start();
LogUtils.v(String.format("%s prepareEncoder, mEncoder = %s", getClass().getSimpleName(), mEncoder));
}
@SuppressLint("MissingPermission")
private void prepareRecorder() {
switch (mChannelCount) {
case 1:
mChannelConfig = AudioFormat.CHANNEL_IN_MONO;
break;
case 2:
mChannelConfig = AudioFormat.CHANNEL_IN_STEREO;
break;
default:
throw new IllegalArgumentException();
}
int minBufferSize = AudioRecord.getMinBufferSize(mSampleRate,
mChannelConfig, AUDIO_FORMAT);
mAudioRecord = new AudioRecord(
MediaRecorder.AudioSource.MIC, // source
mSampleRate, // sample rate, hz
mChannelConfig, // channels
AUDIO_FORMAT, // audio format
minBufferSize); // buffer size (bytes)
}
@Override
public void start() {
if (!mRecording) {
// mRecording = true;
// mAudioRecord.startRecording();
//
// TaskUtils.execute(this);
}
}
@Override
public void stop() {
mRecording = false;
}
@Override
public Surface getInputSurface() {
return null;
}
@Override
protected boolean isSurfaceInput() {
return false;
}
@Override
public void run() {
while (mRecording) {
// drainEncoder(false);
// drainAudio(false);
}
// drainAudio(true);
mAudioRecord.stop();
// drainEncoder(true);
release();
}
private void drainAudio(boolean endOfStream) {
// LogUtils.v(String.format("drainAudio %b", endOfStream));
ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
int bufferIndex = mEncoder.dequeueInputBuffer(-1); // wait indefinitely
if (bufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[bufferIndex];
inputBuffer.clear();
int len = mAudioRecord.read(inputBuffer, SAMPLES_PER_FRAME * 2); // read blocking
long ptsUs = System.nanoTime() / 1000;
if (endOfStream) {
mEncoder.queueInputBuffer(bufferIndex, 0, len, ptsUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
mEncoder.queueInputBuffer(bufferIndex, 0, len, ptsUs, 0);
}
}
}
@Override
public void drainEncoder(boolean endOfStream) {
final int TIMEOUT_USEC = 10000;
if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
if (endOfStream && isSurfaceInput()) {
if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}
ByteBuffer mBuffer = ByteBuffer.allocate(10240);
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
} else {
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
// mTrackIndex = mMuxer.addTrack(newFormat);
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if (!mMuxer.isStarted()) {
mBufferInfo.size = 0;
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
encodedData.get(mBuffer.array(), 7, mBufferInfo.size);
encodedData.clear();
mBuffer.position(7 + mBufferInfo.size);
addADTStoPacket(mBuffer.array(), mBufferInfo.size + 7);
mBuffer.flip();
PushHelper.INSTANCE.pushData(mBuffer.array(), mBufferInfo.size + 7, mBufferInfo.presentationTimeUs / 1000);
if (BuildConfig.DEBUG)
Log.i(TAG, String.format("push audio stamp:%d", mBufferInfo.presentationTimeUs / 1000));
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
}
}
}
private void addADTStoPacket(byte[] packet, int packetLen) {
packet[0] = (byte) 0xFF;
packet[1] = (byte) 0xF1;
packet[2] = (byte) (((2 - 1) << 6) + (11 << 2) + (1 >> 2));
packet[3] = (byte) (((1 & 3) << 6) + (packetLen >> 11));
packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);
packet[6] = (byte) 0xFC;
}
}

@@ -1,339 +0,0 @@
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.content.Context;
import android.opengl.EGLContext;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import com.yinuo.library.vlc.utils.LogUtils;
import java.lang.ref.WeakReference;
/**
* Encode a movie from frames rendered from an external texture image.
* <p>
* The object wraps an encoder running on a dedicated thread. The various control messages
* may be sent from arbitrary threads (typically the app UI thread). The encoder thread
* manages both sides of the encoder (feeding and draining); the only external input is
* the GL texture.
* <p>
* The design is complicated slightly by the need to create an EGL context that shares state
* with a view that gets restarted if (say) the device orientation changes. When the view
* in question is a GLSurfaceView, we don't have full control over the EGL context creation
* on that side, so we have to bend a bit backwards here.
* <p>
* To use:
* <ul>
* <li>create TextureMovieEncoder object
* <li>create an EncoderConfig
* <li>call TextureMovieEncoder#startRecording() with the config
* <li>call TextureMovieEncoder#setTextureId() with the texture object that receives frames
* <li>for each frame, after latching it with SurfaceTexture#updateTexImage(),
* call TextureMovieEncoder#frameAvailable().
* </ul>
* <p>
* TODO: tweak the API (esp. textureId) so it's less awkward for simple use cases.
*/
public abstract class BaseMovieEncoder implements Runnable {
static final int MSG_START_RECORDING = 0;
static final int MSG_STOP_RECORDING = 1;
static final int MSG_FRAME_AVAILABLE = 2;
static final int MSG_QUIT = 4;
// ----- accessed exclusively by encoder thread -----
private WindowSurface mInputWindowSurface;
private EglCore mEglCore;
private MediaEncoderCore mVideoEncoder;
private AudioEncoderCore mAudioEncoder;
// ----- accessed by multiple threads -----
protected volatile EncoderHandler mHandler;
private Object mReadyFence = new Object(); // guards ready/running
private volatile boolean mReady;
private volatile boolean mRunning;
protected Context mContext;
protected int mWidth, mHeight;
private boolean useHevc = false;
public BaseMovieEncoder(Context context, int width, int height, boolean hevc) {
mContext = context;
mWidth = width;
mHeight = height;
useHevc = hevc;
}
/**
* Encoder configuration.
* <p>
* Object is immutable, which means we can safely pass it between threads without
* explicit synchronization (and don't need to worry about it getting tweaked out from
* under us).
* <p>
* TODO: make frame rate and iframe interval configurable? Maybe use builder pattern
* with reasonable defaults for those and bit rate.
*/
public static class EncoderConfig {
AndroidMuxer mMuxer;
final EGLContext mEglContext;
public EncoderConfig(EGLContext sharedEglContext) {
mEglContext = sharedEglContext;
mMuxer = new AndroidMuxer();
}
@Override
public String toString() {
return "EncoderConfig: " + "' ctxt=" + mEglContext;
}
}
/**
* Tells the video recorder to start recording. (Call from non-encoder thread.)
* <p>
* Creates a new thread, which will create an encoder using the provided configuration.
* <p>
* Returns after the recorder thread has started and is ready to accept Messages. The
* encoder may not yet be fully configured.
*/
public void startRecording(EncoderConfig config) {
synchronized (mReadyFence) {
if (mRunning) {
return;
}
mRunning = true;
new Thread(this, "TextureMovieEncoder").start();
while (!mReady) {
try {
mReadyFence.wait();
} catch (InterruptedException ie) {
// ignore
}
}
}
LogUtils.v(String.format("startRecording called"));
mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
}
/**
* Tells the video recorder to stop recording. (Call from non-encoder thread.)
* <p>
* Returns immediately; the encoder/muxer may not yet be finished creating the movie.
* <p>
* TODO: have the encoder thread invoke a callback on the UI thread just before it shuts down
* so we can provide reasonable status UI (and let the caller know that movie encoding
* has completed).
*/
public void stopRecording() {
mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
// We don't know when these will actually finish (or even start). We don't want to
// delay the UI thread though, so we return immediately.
}
/**
* Returns true if recording has been started.
*/
public boolean isRecording() {
synchronized (mReadyFence) {
return mRunning;
}
}
public abstract void onPrepareEncoder();
public abstract void onFrameAvailable(Object o, long timestamp);
/**
* Tells the video recorder that a new frame is available. (Call from non-encoder thread.)
* <p>
* This function sends a message and returns immediately. This isn't sufficient -- we
* don't want the caller to latch a new frame until we're done with this one -- but we
* can get away with it so long as the input frame rate is reasonable and the encoder
* thread doesn't stall.
* <p>
* TODO: either block here until the texture has been rendered onto the encoder surface,
* or have a separate "block if still busy" method that the caller can execute immediately
* before it calls updateTexImage(). The latter is preferred because we don't want to
* stall the caller while this thread does work.
*/
public void frameAvailable(Object object, long timestamp) {
synchronized (mReadyFence) {
if (!mReady) {
return;
}
}
if (timestamp == 0) {
// Seeing this after device is toggled off/on with power button. The
// first frame back has a zero timestamp.
//
// MPEG4Writer thinks this is cause to abort() in native code, so it's very
// important that we just ignore the frame.
return;
}
onFrameAvailable(object, timestamp);
}
/**
* Encoder thread entry point. Establishes Looper/Handler and waits for messages.
* <p>
*
* @see Thread#run()
*/
@Override
public void run() {
// Establish a Looper for this thread, and define a Handler for it.
Looper.prepare();
synchronized (mReadyFence) {
mHandler = new EncoderHandler(this);
mReady = true;
mReadyFence.notify();
}
Looper.loop();
synchronized (mReadyFence) {
mReady = mRunning = false;
mHandler = null;
}
}
/**
* Handles encoder state change requests. The handler is created on the encoder thread.
*/
static class EncoderHandler extends Handler {
private WeakReference<BaseMovieEncoder> mWeakEncoder;
public EncoderHandler(BaseMovieEncoder encoder) {
mWeakEncoder = new WeakReference<BaseMovieEncoder>(encoder);
}
@Override // runs on encoder thread
public void handleMessage(Message inputMessage) {
int what = inputMessage.what;
Object obj = inputMessage.obj;
BaseMovieEncoder encoder = mWeakEncoder.get();
if (encoder == null) {
return;
}
switch (what) {
case MSG_START_RECORDING:
encoder.handleStartRecording((EncoderConfig) obj);
break;
case MSG_STOP_RECORDING:
encoder.handleStopRecording();
break;
case MSG_FRAME_AVAILABLE:
long timestamp = (((long) inputMessage.arg1) << 32) |
(((long) inputMessage.arg2) & 0xffffffffL);
encoder.handleFrameAvailable(timestamp);
break;
case MSG_QUIT:
Looper.myLooper().quit();
break;
default:
throw new RuntimeException("Unhandled msg what=" + what);
}
}
}
/**
* Starts recording.
*/
private void handleStartRecording(EncoderConfig config) {
LogUtils.v(String.format("handleStartRecording"));
prepareEncoder(config.mMuxer, config.mEglContext, mWidth, mHeight);
onPrepareEncoder();
}
/**
* Handles notification of an available frame.
* <p>
* The texture is rendered onto the encoder's input surface, along with a moving
* box (just because we can).
* <p>
*
* @param timestampNanos The frame's timestamp, from SurfaceTexture.
*/
private void handleFrameAvailable(long timestampNanos) {
mVideoEncoder.start();
mAudioEncoder.start();
onFrameAvailable();
mInputWindowSurface.setPresentationTime(timestampNanos);
mInputWindowSurface.swapBuffers();
}
public abstract void onFrameAvailable();
/**
* Handles a request to stop encoding.
*/
private void handleStopRecording() {
mVideoEncoder.stop();
mAudioEncoder.stop();
releaseEncoder();
}
private void prepareEncoder(AndroidMuxer muxer, EGLContext sharedContext, int width, int height) {
mWidth = width;
mHeight = height;
if (useHevc){
mVideoEncoder = new VideoEncoderCoreHevc(muxer, width, height);
}else {
mVideoEncoder = new VideoEncoderCoreAvc(muxer, width, height);
}
mAudioEncoder = new AudioEncoderCore(muxer);
mEglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
mInputWindowSurface = new WindowSurface(mEglCore, mVideoEncoder.getInputSurface(), true);
mInputWindowSurface.makeCurrent();
}
private void releaseEncoder() {
mVideoEncoder.release();
if (mInputWindowSurface != null) {
mInputWindowSurface.release();
mInputWindowSurface = null;
}
if (mEglCore != null) {
mEglCore.release();
mEglCore = null;
}
}
}
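The class comment above documents the intended call sequence for this (now removed) encoder. A minimal Kotlin sketch of that sequence, assembled only from signatures that appear elsewhere in this diff (MovieEncoder1 construction as in RtspSurfaceRender, EncoderConfig and frameAvailable as used in RtspSurfaceRender2); width, height and the RGBA frame are assumed to come from the caller:

import android.content.Context
import android.opengl.EGL14
import com.yinuo.library.vlc.encoder.BaseMovieEncoder
import com.yinuo.library.vlc.encoder.MovieEncoder1

// Illustrative call sequence for the removed encoder stack.
fun recordFrames(context: Context, width: Int, height: Int, rgbaFrame: ByteArray) {
    val encoder: BaseMovieEncoder = MovieEncoder1(context, width, height, false) // false = H.264, true = H.265
    encoder.startRecording(BaseMovieEncoder.EncoderConfig(EGL14.eglGetCurrentContext()))
    // Feed each decoded RGBA frame with a nanosecond timestamp.
    encoder.frameAvailable(rgbaFrame, System.nanoTime())
    encoder.stopRecording()
}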

@@ -1,216 +0,0 @@
package com.yinuo.library.vlc.encoder;
import android.annotation.TargetApi;
import android.app.Activity;
import android.hardware.Camera;
import android.os.Build;
import android.os.Environment;
import android.view.Surface;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Locale;
/**
* Created by liwentian on 2017/8/29.
*/
public class CameraHelper {
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
public static int getFrontCameraId() {
int frontIdx = 0;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
frontIdx = i;
break;
}
}
return frontIdx;
}
public static int getDisplayOrientation(Activity activity, int cameraId) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else {
// back-facing
result = (info.orientation - degrees + 360) % 360;
}
return result;
}
/**
* Iterate over supported camera video sizes to see which one best fits the
* dimensions of the given view while maintaining the aspect ratio. If none can,
* be lenient with the aspect ratio.
*
* @param supportedVideoSizes Supported camera video sizes.
* @param previewSizes Supported camera preview sizes.
* @param w The width of the view.
* @param h The height of the view.
* @return Best match camera video size to fit in the view.
*/
public static Camera.Size getOptimalVideoSize(List<Camera.Size> supportedVideoSizes,
List<Camera.Size> previewSizes, int w, int h) {
// Use a very small tolerance because we want an exact match.
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
// Supported video sizes list might be null, it means that we are allowed to use the preview
// sizes
List<Camera.Size> videoSizes;
if (supportedVideoSizes != null) {
videoSizes = supportedVideoSizes;
} else {
videoSizes = previewSizes;
}
Camera.Size optimalSize = null;
// Start with max value and refine as we iterate over available video sizes. This is the
// minimum difference between view and camera height.
double minDiff = Double.MAX_VALUE;
// Target view height
int targetHeight = h;
// Try to find a video size that matches aspect ratio and the target view size.
// Iterate over all available sizes and pick the largest size that can fit in the view and
// still maintain the aspect ratio.
for (Camera.Size size : videoSizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE)
continue;
if (Math.abs(size.height - targetHeight) < minDiff && previewSizes.contains(size)) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// Cannot find video size that matches the aspect ratio, ignore the requirement
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Camera.Size size : videoSizes) {
if (Math.abs(size.height - targetHeight) < minDiff && previewSizes.contains(size)) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
/**
* @return the default camera on the device. Return null if there is no camera on the device.
*/
public static Camera getDefaultCameraInstance() {
int front = getFrontCameraId();
return Camera.open(front);
}
/**
* @return the default rear/back facing camera on the device. Returns null if camera is not
* available.
*/
public static Camera getDefaultBackFacingCameraInstance() {
return getDefaultCamera(Camera.CameraInfo.CAMERA_FACING_BACK);
}
/**
* @return the default front facing camera on the device. Returns null if camera is not
* available.
*/
public static Camera getDefaultFrontFacingCameraInstance() {
return getDefaultCamera(Camera.CameraInfo.CAMERA_FACING_FRONT);
}
/**
* @param position Physical position of the camera i.e Camera.CameraInfo.CAMERA_FACING_FRONT
* or Camera.CameraInfo.CAMERA_FACING_BACK.
* @return the default camera on the device. Returns null if camera is not available.
*/
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
private static Camera getDefaultCamera(int position) {
// Find the total number of cameras available
int mNumberOfCameras = Camera.getNumberOfCameras();
// Find the ID of the back-facing ("default") camera
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < mNumberOfCameras; i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == position) {
return Camera.open(i);
}
}
return null;
}
/**
* Creates a media file in the {@code Environment.DIRECTORY_PICTURES} directory. The directory
* is persistent and available to other applications like gallery.
*
* @param type Media type. Can be video or image.
* @param defaultRecordDuration
* @return A file object pointing to the newly created file.
*/
public static File getOutputMediaFile(int type, long time, long duration) {
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
if (!Environment.getExternalStorageState().equalsIgnoreCase(Environment.MEDIA_MOUNTED)) {
return null;
}
File mediaStorageDir = Environment.getExternalStoragePublicDirectory("video");
if (!mediaStorageDir.exists() && !mediaStorageDir.mkdirs()) {
return null;
}
// Create a media file name
SimpleDateFormat format = new SimpleDateFormat("yyyyMMddHHmmss", Locale.CHINA);
String timeStamp = format.format(time);
String endTimeStamp = format.format(time + duration);
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_" + timeStamp + ".jpg");
} else if (type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_" + timeStamp + "_" + endTimeStamp + ".mp4");
} else {
return null;
}
return mediaFile;
}
}

@@ -1,376 +0,0 @@
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.util.Log;
import android.view.Surface;
/**
* Core EGL state (display, context, config).
* <p>
* The EGLContext must only be attached to one thread at a time. This class is not thread-safe.
*/
public final class EglCore {
private static final String TAG = GlUtil.TAG;
/**
* Constructor flag: surface must be recordable. This discourages EGL from using a
* pixel format that cannot be converted efficiently to something usable by the video
* encoder.
*/
public static final int FLAG_RECORDABLE = 0x01;
/**
* Constructor flag: ask for GLES3, fall back to GLES2 if not available. Without this
* flag, GLES2 is used.
*/
public static final int FLAG_TRY_GLES3 = 0x02;
// Android-specific extension.
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
private EGLConfig mEGLConfig = null;
private int mGlVersion = -1;
/**
* Prepares EGL display and context.
* <p>
* Equivalent to EglCore(null, 0).
*/
public EglCore() {
this(null, 0);
}
/**
* Prepares EGL display and context.
* <p>
* @param sharedContext The context to share, or null if sharing is not desired.
* @param flags Configuration bit flags, e.g. FLAG_RECORDABLE.
*/
public EglCore(EGLContext sharedContext, int flags) {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("EGL already set up");
}
if (sharedContext == null) {
sharedContext = EGL14.EGL_NO_CONTEXT;
}
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
mEGLDisplay = null;
throw new RuntimeException("unable to initialize EGL14");
}
// Try to get a GLES3 context, if requested.
if ((flags & FLAG_TRY_GLES3) != 0) {
//Log.d(TAG, "Trying GLES 3");
EGLConfig config = getConfig(flags, 3);
if (config != null) {
int[] attrib3_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
EGL14.EGL_NONE
};
EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
attrib3_list, 0);
if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) {
//Log.d(TAG, "Got GLES 3 config");
mEGLConfig = config;
mEGLContext = context;
mGlVersion = 3;
}
}
}
if (mEGLContext == EGL14.EGL_NO_CONTEXT) { // GLES 2 only, or GLES 3 attempt failed
//Log.d(TAG, "Trying GLES 2");
EGLConfig config = getConfig(flags, 2);
if (config == null) {
throw new RuntimeException("Unable to find a suitable EGLConfig");
}
int[] attrib2_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
attrib2_list, 0);
checkEglError("eglCreateContext");
mEGLConfig = config;
mEGLContext = context;
mGlVersion = 2;
}
// Confirm with query.
int[] values = new int[1];
EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION,
values, 0);
Log.d(TAG, "EGLContext created, client version " + values[0]);
}
/**
* Finds a suitable EGLConfig.
*
* @param flags Bit flags from constructor.
* @param version Must be 2 or 3.
*/
private EGLConfig getConfig(int flags, int version) {
int renderableType = EGL14.EGL_OPENGL_ES2_BIT;
if (version >= 3) {
renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
}
// The actual surface is generally RGBA or RGBX, so situationally omitting alpha
// doesn't really help. It can also lead to a huge performance hit on glReadPixels()
// when reading into a GL_RGBA buffer.
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
//EGL14.EGL_DEPTH_SIZE, 16,
//EGL14.EGL_STENCIL_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, renderableType,
EGL14.EGL_NONE, 0, // placeholder for recordable [@-3]
EGL14.EGL_NONE
};
if ((flags & FLAG_RECORDABLE) != 0) {
attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID;
attribList[attribList.length - 2] = 1;
}
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
numConfigs, 0)) {
Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
return null;
}
return configs[0];
}
/**
* Discards all resources held by this class, notably the EGL context. This must be
* called from the thread where the context was created.
* <p>
* On completion, no context will be current.
*/
public void release() {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
// Android is unusual in that it uses a reference-counted EGLDisplay. So for
// every eglInitialize() we need an eglTerminate().
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEGLDisplay);
}
mEGLDisplay = EGL14.EGL_NO_DISPLAY;
mEGLContext = EGL14.EGL_NO_CONTEXT;
mEGLConfig = null;
}
@Override
protected void finalize() throws Throwable {
try {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
// We're limited here -- finalizers don't run on the thread that holds
// the EGL state, so if a surface or context is still current on another
// thread we can't fully release it here. Exceptions thrown from here
// are quietly discarded. Complain in the log file.
Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked");
release();
}
} finally {
super.finalize();
}
}
/**
* Destroys the specified surface. Note the EGLSurface won't actually be destroyed if it's
* still current in a context.
*/
public void releaseSurface(EGLSurface eglSurface) {
EGL14.eglDestroySurface(mEGLDisplay, eglSurface);
}
/**
* Creates an EGL surface associated with a Surface.
* <p>
* If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute.
*/
public EGLSurface createWindowSurface(Object surface) {
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
throw new RuntimeException("invalid surface: " + surface);
}
// Create a window surface, and attach it to the Surface we received.
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
surfaceAttribs, 0);
checkEglError("eglCreateWindowSurface");
if (eglSurface == null) {
throw new RuntimeException("surface was null");
}
return eglSurface;
}
/**
* Creates an EGL surface associated with an offscreen buffer.
*/
public EGLSurface createOffscreenSurface(int width, int height) {
int[] surfaceAttribs = {
EGL14.EGL_WIDTH, width,
EGL14.EGL_HEIGHT, height,
EGL14.EGL_NONE
};
EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig,
surfaceAttribs, 0);
checkEglError("eglCreatePbufferSurface");
if (eglSurface == null) {
throw new RuntimeException("surface was null");
}
return eglSurface;
}
/**
* Makes our EGL context current, using the supplied surface for both "draw" and "read".
*/
public void makeCurrent(EGLSurface eglSurface) {
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
// called makeCurrent() before create?
Log.d(TAG, "NOTE: makeCurrent w/o display");
}
if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Makes our EGL context current, using the supplied "draw" and "read" surfaces.
*/
public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
// called makeCurrent() before create?
Log.d(TAG, "NOTE: makeCurrent w/o display");
}
if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent(draw,read) failed");
}
}
/**
* Makes no context current.
*/
public void makeNothingCurrent() {
if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*
* @return false on failure
*/
public boolean swapBuffers(EGLSurface eglSurface) {
return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
}
/**
* Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
*/
public void setPresentationTime(EGLSurface eglSurface, long nsecs) {
EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);
}
/**
* Returns true if our context and the specified surface are current.
*/
public boolean isCurrent(EGLSurface eglSurface) {
return mEGLContext.equals(EGL14.eglGetCurrentContext()) &&
eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
}
/**
* Performs a simple surface query.
*/
public int querySurface(EGLSurface eglSurface, int what) {
int[] value = new int[1];
EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
return value[0];
}
/**
* Queries a string value.
*/
public String queryString(int what) {
return EGL14.eglQueryString(mEGLDisplay, what);
}
/**
* Returns the GLES version this context is configured for (currently 2 or 3).
*/
public int getGlVersion() {
return mGlVersion;
}
/**
* Writes the current display, context, and surface to the log.
*/
public static void logCurrent(String msg) {
EGLDisplay display;
EGLContext context;
EGLSurface surface;
display = EGL14.eglGetCurrentDisplay();
context = EGL14.eglGetCurrentContext();
surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
Log.i(TAG, "Current EGL (" + msg + "): display=" + display + ", context=" + context +
", surface=" + surface);
}
/**
* Checks for EGL errors. Throws an exception if an error has been raised.
*/
private void checkEglError(String msg) {
int error;
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
}
}
public EGLContext getEGLContext() {
return mEGLContext;
}
}
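
/*
 * Hedged usage sketch, not part of the original Grafika sources: a minimal off-screen setup using
 * only the EglCore methods defined above. The 1x1 pbuffer size and the clear color are illustrative.
 */
final class EglCoreDemo {
    static void runOffscreen() {
        EglCore eglCore = new EglCore(null, EglCore.FLAG_TRY_GLES3);
        EGLSurface pbuffer = eglCore.createOffscreenSurface(1, 1);
        try {
            eglCore.makeCurrent(pbuffer);
            android.opengl.GLES20.glClearColor(0f, 0f, 0f, 1f);
            android.opengl.GLES20.glClear(android.opengl.GLES20.GL_COLOR_BUFFER_BIT);
            Log.d(GlUtil.TAG, "running GLES " + eglCore.getGlVersion());
        } finally {
            eglCore.makeNothingCurrent();
            eglCore.releaseSurface(pbuffer);
            eglCore.release();
        }
    }
}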

@ -1,197 +0,0 @@
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.graphics.Bitmap;
import android.opengl.EGL14;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import android.util.Log;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Common base class for EGL surfaces.
* <p>
* There can be multiple surfaces associated with a single context.
*/
public class EglSurfaceBase {
protected static final String TAG = GlUtil.TAG;
// EglCore object we're associated with. It may be associated with multiple surfaces.
protected EglCore mEglCore;
private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
private int mWidth = -1;
private int mHeight = -1;
protected EglSurfaceBase(EglCore eglCore) {
mEglCore = eglCore;
}
/**
* Creates a window surface.
* <p>
* @param surface May be a Surface or SurfaceTexture.
*/
public void createWindowSurface(Object surface) {
if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
throw new IllegalStateException("surface already created");
}
mEGLSurface = mEglCore.createWindowSurface(surface);
// Don't cache width/height here, because the size of the underlying surface can change
// out from under us (see e.g. HardwareScalerActivity).
//mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
//mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
}
/**
* Creates an off-screen surface.
*/
public void createOffscreenSurface(int width, int height) {
if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
throw new IllegalStateException("surface already created");
}
mEGLSurface = mEglCore.createOffscreenSurface(width, height);
mWidth = width;
mHeight = height;
}
/**
* Returns the surface's width, in pixels.
* <p>
* If this is called on a window surface, and the underlying surface is in the process
* of changing size, we may not see the new size right away (e.g. in the "surfaceChanged"
* callback). The size should match after the next buffer swap.
*/
public int getWidth() {
if (mWidth < 0) {
return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
} else {
return mWidth;
}
}
/**
* Returns the surface's height, in pixels.
*/
public int getHeight() {
if (mHeight < 0) {
return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
} else {
return mHeight;
}
}
/**
* Release the EGL surface.
*/
public void releaseEglSurface() {
mEglCore.releaseSurface(mEGLSurface);
mEGLSurface = EGL14.EGL_NO_SURFACE;
mWidth = mHeight = -1;
}
/**
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
mEglCore.makeCurrent(mEGLSurface);
}
/**
* Makes our EGL context and surface current for drawing, using the supplied surface
* for reading.
*/
public void makeCurrentReadFrom(EglSurfaceBase readSurface) {
mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface);
}
/**
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*
* @return false on failure
*/
public boolean swapBuffers() {
boolean result = mEglCore.swapBuffers(mEGLSurface);
if (!result) {
Log.d(TAG, "WARNING: swapBuffers() failed");
}
return result;
}
/**
* Sends the presentation time stamp to EGL.
*
* @param nsecs Timestamp, in nanoseconds.
*/
public void setPresentationTime(long nsecs) {
mEglCore.setPresentationTime(mEGLSurface, nsecs);
}
/**
* Saves the EGL surface to a file.
* <p>
* Expects that this object's EGL surface is current.
*/
public void saveFrame(File file) throws IOException {
if (!mEglCore.isCurrent(mEGLSurface)) {
throw new RuntimeException("Expected EGL context/surface is not current");
}
// glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
// data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
// constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
// Bitmap "copy pixels" method wants the same format GL provides.
//
// Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
// here often.
//
// Making this even more interesting is the upside-down nature of GL, which means
// our output will look upside down relative to what appears on screen if the
// typical GL conventions are used.
String filename = file.toString();
int width = getWidth();
int height = getHeight();
ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
buf.order(ByteOrder.LITTLE_ENDIAN);
GLES20.glReadPixels(0, 0, width, height,
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
GlUtil.checkGlError("glReadPixels");
buf.rewind();
BufferedOutputStream bos = null;
try {
bos = new BufferedOutputStream(new FileOutputStream(filename));
Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bmp.copyPixelsFromBuffer(buf);
bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
bmp.recycle();
} finally {
if (bos != null) bos.close();
}
Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
}
}

@ -1,195 +0,0 @@
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.opengl.GLES20;
import android.opengl.GLES30;
import android.opengl.Matrix;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
* Some OpenGL utility functions.
*/
public class GlUtil {
public static final String TAG = "Grafika";
/** Identity matrix for general use. Don't modify or life will get weird. */
public static final float[] IDENTITY_MATRIX;
static {
IDENTITY_MATRIX = new float[16];
Matrix.setIdentityM(IDENTITY_MATRIX, 0);
}
private static final int SIZEOF_FLOAT = 4;
private GlUtil() {} // do not instantiate
/**
* Creates a new program from the supplied vertex and fragment shaders.
*
* @return A handle to the program, or 0 on failure.
*/
public static int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
checkGlError("glCreateProgram");
if (program == 0) {
Log.e(TAG, "Could not create program");
}
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
return program;
}
/**
* Compiles the provided shader source.
*
* @return A handle to the shader, or 0 on failure.
*/
public static int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
checkGlError("glCreateShader type=" + shaderType);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
return shader;
}
/**
* Checks to see if a GLES error has been raised.
*/
public static void checkGlError(String op) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
String msg = op + ": glError 0x" + Integer.toHexString(error);
Log.e(TAG, msg);
throw new RuntimeException(msg);
}
}
/**
* Checks to see if the location we obtained is valid. GLES returns -1 if a label
* could not be found, but does not set the GL error.
* <p>
* Throws a RuntimeException if the location is invalid.
*/
public static void checkLocation(int location, String label) {
if (location < 0) {
throw new RuntimeException("Unable to locate '" + label + "' in program");
}
}
/**
* Creates a texture from raw data.
*
* @param data Image data, in a "direct" ByteBuffer.
* @param width Texture width, in pixels (not bytes).
* @param height Texture height, in pixels.
* @param format Image data format (use constant appropriate for glTexImage2D(), e.g. GL_RGBA).
* @return Handle to texture.
*/
public static int createImageTexture(ByteBuffer data, int width, int height, int format) {
int[] textureHandles = new int[1];
int textureHandle;
GLES20.glGenTextures(1, textureHandles, 0);
textureHandle = textureHandles[0];
GlUtil.checkGlError("glGenTextures");
// Bind the texture handle to the 2D texture target.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);
// Configure min/mag filtering, i.e. what scaling method do we use if what we're rendering
// is smaller or larger than the source image.
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GlUtil.checkGlError("loadImageTexture");
// Load the data from the buffer into the texture handle.
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, format,
width, height, /*border*/ 0, format, GLES20.GL_UNSIGNED_BYTE, data);
GlUtil.checkGlError("loadImageTexture");
return textureHandle;
}
/**
* Allocates a direct float buffer, and populates it with the float array data.
*/
public static FloatBuffer createFloatBuffer(float[] coords) {
// Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT);
bb.order(ByteOrder.nativeOrder());
FloatBuffer fb = bb.asFloatBuffer();
fb.put(coords);
fb.position(0);
return fb;
}
/**
* Writes GL version info to the log.
*/
public static void logVersionInfo() {
Log.i(TAG, "vendor : " + GLES20.glGetString(GLES20.GL_VENDOR));
Log.i(TAG, "renderer: " + GLES20.glGetString(GLES20.GL_RENDERER));
Log.i(TAG, "version : " + GLES20.glGetString(GLES20.GL_VERSION));
if (false) {
int[] values = new int[1];
GLES30.glGetIntegerv(GLES30.GL_MAJOR_VERSION, values, 0);
int majorVersion = values[0];
GLES30.glGetIntegerv(GLES30.GL_MINOR_VERSION, values, 0);
int minorVersion = values[0];
if (GLES30.glGetError() == GLES30.GL_NO_ERROR) {
Log.i(TAG, "iversion: " + majorVersion + "." + minorVersion);
}
}
}
}
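
/*
 * Hedged usage sketch, not part of the original Grafika sources: builds a trivial solid-color program
 * with the helpers above. The shader strings and method name are illustrative; a current EGL context
 * is assumed.
 */
final class GlUtilDemo {
    private static final String VERTEX_SHADER =
            "attribute vec4 a_Position;\n" +
            "void main() { gl_Position = a_Position; }\n";
    private static final String FRAGMENT_SHADER =
            "precision mediump float;\n" +
            "void main() { gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); }\n";

    static int buildSolidRedProgram() {
        int program = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
        if (program == 0) {
            throw new RuntimeException("program link failed, see log tag " + GlUtil.TAG);
        }
        int aPosition = GLES20.glGetAttribLocation(program, "a_Position");
        GlUtil.checkLocation(aPosition, "a_Position");
        return program;
    }
}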

@ -1,118 +0,0 @@
package com.yinuo.library.vlc.encoder;
import android.media.MediaCodec;
import android.os.Build;
import android.os.Bundle;
import android.view.Surface;
import com.yinuo.library.vlc.utils.LogUtils;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
/**
* Created by liwentian on 17/8/1.
*/
public abstract class MediaEncoderCore {
protected final String TAG = getClass().getSimpleName();
protected static final boolean VERBOSE = true;
protected AndroidMuxer mMuxer;
protected MediaCodec mEncoder;
protected int mTrackIndex = -1;
protected volatile boolean mRecording;
protected MediaCodec.BufferInfo mBufferInfo;
public MediaEncoderCore(AndroidMuxer muxer) {
LogUtils.v(String.format("%s <init> called", getClass().getSimpleName()));
mMuxer = muxer;
mBufferInfo = new MediaCodec.BufferInfo();
}
public abstract void start();
public abstract void stop();
/**
* Extracts all pending data from the encoder and forwards it to the muxer.
* <p>
* If endOfStream is not set, this returns when there is no more data to drain. If it
* is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
* Calling this with endOfStream set should be done once, right before stopping the muxer.
* <p>
* We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
* not recording audio.
*/
long timeStamp;
public abstract Surface getInputSurface();
public abstract void drainEncoder(boolean endOfStream);
void buildKeyFrame() {
if (System.currentTimeMillis() - timeStamp >= 1000) {// request a new key frame at most once every 1000 ms
timeStamp = System.currentTimeMillis();
if (Build.VERSION.SDK_INT >= 23) {
Bundle params = new Bundle();
params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
mEncoder.setParameters(params);
}
}
}
public void release() {
LogUtils.v(String.format("%s.release", getClass().getSimpleName()));
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mMuxer != null) {
mMuxer.release();
}
}
protected abstract boolean isSurfaceInput();
/**
* Writes a byte range to a file, e.g. for dumping raw encoder output while debugging.
*
* @param buffer source data
* @param offset start offset within buffer
* @param length number of bytes to write
* @param path   destination file path
* @param append true to append to an existing file, false to overwrite it
*/
public static void save(byte[] buffer, int offset, int length, String path, boolean append) {
FileOutputStream fos = null;
try {
fos = new FileOutputStream(path, append);
fos.write(buffer, offset, length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (fos != null) {
try {
fos.flush();
fos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
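
/*
 * Hedged usage sketch, not part of the original file: the per-frame pattern for a surface-input core.
 * EglCore and WindowSurface come from this package; the frame count, 24 fps timing and the omitted
 * GLES drawing are illustrative.
 */
final class EncoderLoopDemo {
    static void encodeFrames(MediaEncoderCore core, EglCore eglCore, int frameCount) {
        WindowSurface inputWindow = new WindowSurface(eglCore, core.getInputSurface(), true);
        for (int i = 0; i < frameCount; i++) {
            core.drainEncoder(false);                 // pull encoded output before producing more input
            inputWindow.makeCurrent();
            // ... draw the frame with GLES here ...
            inputWindow.setPresentationTime(i * 1_000_000_000L / 24);  // nanoseconds at 24 fps
            inputWindow.swapBuffers();
        }
        core.drainEncoder(true);                      // send EOS and flush the remaining output
        core.release();
        inputWindow.release();
    }
}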

@ -1,63 +0,0 @@
package com.yinuo.library.vlc.encoder;
import android.content.Context;
import com.yinuo.library.vlc.RGBProgram;
import com.yinuo.library.vlc.utils.LogUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Created by liwentian on 2017/10/31.
*/
public class MovieEncoder1 extends BaseMovieEncoder {
private volatile RGBProgram mRGBProgram;
private volatile ByteBuffer mYUVBuffer;
public MovieEncoder1(Context context, int width, int height, boolean hevc) {
super(context, width, height, hevc);
}
@Override
public void onPrepareEncoder() {
LogUtils.v(String.format("onPrepareEncoder width = %d, height = %d", mWidth, mHeight));
mRGBProgram = new RGBProgram(mContext, mWidth, mHeight);
mYUVBuffer = ByteBuffer.allocateDirect(mWidth * mHeight * 4)
.order(ByteOrder.nativeOrder());
}
@Override
public void onFrameAvailable(Object object, long timestamp) {
byte[] data = (byte[]) object;
if (mYUVBuffer == null) {
return;
}
// LogUtils.v(String.format("onFrameAvailable: data = %d, buffer = %d", data.length, mYUVBuffer.capacity()));
synchronized (mYUVBuffer) {
mYUVBuffer.position(0);
int len = Math.min(mYUVBuffer.capacity(), data.length);
mYUVBuffer.put(data, 0, len);
}
if (mHandler != null) {
mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
(int) (timestamp >> 32), (int) timestamp));
}
}
@Override
public void onFrameAvailable() {
mRGBProgram.useProgram();
synchronized (mYUVBuffer) {
mRGBProgram.setUniforms(mYUVBuffer.array());
}
mRGBProgram.draw();
}
}

@ -1,205 +0,0 @@
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;
import com.yinuo.library.vlc.PushHelper;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* This class wraps up the core components used for surface-input video encoding.
* <p>
* Once created, frames are fed to the input surface. Remember to provide the presentation
* time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
* producer side doesn't get backed up.
* <p>
* This class is not thread-safe, with one exception: it is valid to use the input surface
* on one thread, and drain the output on a different thread.
*/
public class VideoEncoderCoreAvc extends MediaEncoderCore {
// TODO: these ought to be configurable as well
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 24; // 30fps
private static final int IFRAME_INTERVAL = 1; // 5 seconds between I-frames
private static final int BIT_RATE = 4000000;
private Surface mInputSurface;
/**
* Configures encoder and muxer state, and prepares the input Surface.
*/
public VideoEncoderCoreAvc(AndroidMuxer muxer, int width, int height) {
super(muxer);
prepareEncoder(width, height);
PushHelper.INSTANCE.startStream(false);
}
private void prepareEncoder(int width, int height) {
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "format: " + format);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
try {
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
} catch (IOException e) {
e.printStackTrace();
}
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mEncoder.createInputSurface();
mEncoder.start();
}
@Override
public void drainEncoder(boolean endOfStream) {
buildKeyFrame();
final int TIMEOUT_USEC = 10000;
if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
if (endOfStream && isSurfaceInput()) {
if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}
byte[] mSpsPps = new byte[0];
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
} else {
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if (!mMuxer.isStarted()) {
mBufferInfo.size = 0;
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if (VERBOSE) {
Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
mBufferInfo.presentationTimeUs);
}
byte[] outData = new byte[mBufferInfo.size];
// copy the encoded bytes out of the buffer into outData
encodedData.get(outData);
// cache the SPS/PPS config data; SPS starts with 0x00 0x00 0x00 0x01 0x67
// (0x67 == 103 decimal)
if (outData[0] == 0 && outData[1] == 0 && outData[2] == 0
&& outData[3] == 1 && outData[4] == 103) {
mSpsPps = outData;
} else if (outData[0] == 0 && outData[1] == 0 && outData[2] == 0
&& outData[3] == 1 && outData[4] == 101) {
// a key frame starts with 0x00 0x00 0x00 0x01 0x65 (0x65 == 101 decimal)
// prepend the cached PPS/SPS so the key frame can be decoded on its own
byte[] iframeData = new byte[mSpsPps.length + outData.length];
System.arraycopy(mSpsPps, 0, iframeData, 0, mSpsPps.length);
System.arraycopy(outData, 0, iframeData, mSpsPps.length, outData.length);
outData = iframeData;
}
//save(outData, 0, outData.length, Environment.getExternalStorageDirectory() + "/easy.h264", true);
PushHelper.INSTANCE.pushData(outData, outData.length, mBufferInfo.presentationTimeUs / 1000);
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
}
}
}
/**
* Returns the encoder's input surface.
*/
@Override
public Surface getInputSurface() {
return mInputSurface;
}
@Override
public void start() {
drainEncoder(false);
}
@Override
public void stop() {
drainEncoder(true);
}
@Override
protected boolean isSurfaceInput() {
return true;
}
}
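
/*
 * Hedged helper sketch, not part of the original file: the checks above compare the whole byte after
 * the 4-byte start code (0x67 = SPS, 0x65 = IDR). Masking out the 5-bit nal_unit_type is equivalent
 * and also copes with 3-byte start codes; constants and names are illustrative. Note that the cached
 * SPS/PPS normally has to survive across drainEncoder() calls (the HEVC variant keeps it in a field
 * for exactly that reason).
 */
final class AvcNalDemo {
    static final int NAL_SPS = 7;   // 0x67 & 0x1F
    static final int NAL_PPS = 8;   // 0x68 & 0x1F
    static final int NAL_IDR = 5;   // 0x65 & 0x1F

    /** Returns the H.264 nal_unit_type of a buffer starting with 00 00 00 01 or 00 00 01. */
    static int nalType(byte[] data) {
        int offset = (data[2] == 0x01) ? 3 : 4;
        return data[offset] & 0x1F;
    }
}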

@ -1,205 +0,0 @@
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;
import com.yinuo.library.vlc.PushHelper;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* This class wraps up the core components used for surface-input video encoding.
* <p>
* Once created, frames are fed to the input surface. Remember to provide the presentation
* time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
* producer side doesn't get backed up.
* <p>
* This class is not thread-safe, with one exception: it is valid to use the input surface
* on one thread, and drain the output on a different thread.
*/
public class VideoEncoderCoreHevc extends MediaEncoderCore {
// TODO: these ought to be configurable as well
private static final String MIME_TYPE = "video/hevc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 24; // 30fps
private static final int IFRAME_INTERVAL = 1; // 5 seconds between I-frames
private static final int BIT_RATE = 4000000;
private Surface mInputSurface;
/**
* Configures encoder and muxer state, and prepares the input Surface.
*/
public VideoEncoderCoreHevc(AndroidMuxer muxer, int width, int height) {
super(muxer);
prepareEncoder(width, height);
PushHelper.INSTANCE.startStream(true);
}
private void prepareEncoder(int width, int height) {
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "format: " + format);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
try {
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
} catch (IOException e) {
e.printStackTrace();
}
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mEncoder.createInputSurface();
mEncoder.start();
}
/**
* Returns the encoder's input surface.
*/
@Override
public Surface getInputSurface() {
return mInputSurface;
}
public static final int NAL_I = 19;
public static final int NAL_VPS = 32;
private byte[] vps_sps_pps_buf;
@Override
public void drainEncoder(boolean endOfStream) {
buildKeyFrame();
final int TIMEOUT_USEC = 10000;
if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
if (endOfStream && isSurfaceInput()) {
if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}
byte[] mSpsPps = new byte[0];
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
} else {
if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if (!mMuxer.isStarted()) {
mBufferInfo.size = 0;
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
if (VERBOSE) {
Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
mBufferInfo.presentationTimeUs);
}
byte[] outData = new byte[mBufferInfo.size];
encodedData.get(outData);
int offset = 4;
if (outData[2] == 0x01) {
offset = 3;
}
int type = (outData[offset] & 0x7E) >> 1;
if (type == NAL_VPS) {
vps_sps_pps_buf = outData;
} else if (type == NAL_I) {
byte[] newBuf = new byte[vps_sps_pps_buf.length + outData.length];
System.arraycopy(vps_sps_pps_buf, 0, newBuf, 0, vps_sps_pps_buf.length);
System.arraycopy(outData, 0, newBuf, vps_sps_pps_buf.length, outData.length);
outData = newBuf;
}
PushHelper.INSTANCE.pushData(outData, outData.length, mBufferInfo.presentationTimeUs / 1000);
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
if (VERBOSE) Log.d(TAG, "end of stream reached");
}
}
}
}
@Override
public void start() {
drainEncoder(false);
}
@Override
public void stop() {
drainEncoder(true);
}
@Override
protected boolean isSurfaceInput() {
return true;
}
}
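
/*
 * Hedged helper sketch, not part of the original file: the HEVC NAL unit header is two bytes, and the
 * 6-bit nal_unit_type sits in bits 1..6 of the first byte, which is exactly what (b & 0x7E) >> 1
 * extracts above. Type 32 is the VPS (NAL_VPS) and 19 is IDR_W_RADL (NAL_I); names are illustrative.
 */
final class HevcNalDemo {
    static int nalType(byte[] data) {
        int offset = (data[2] == 0x01) ? 3 : 4;   // 3- or 4-byte Annex-B start code
        return (data[offset] & 0x7E) >> 1;
    }

    static boolean isIdrFrame(byte[] data) {
        int type = nalType(data);
        return type == 19 || type == 20;          // IDR_W_RADL or IDR_N_LP
    }
}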

@ -1,90 +0,0 @@
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yinuo.library.vlc.encoder;
import android.graphics.SurfaceTexture;
import android.view.Surface;
/**
* Recordable EGL window surface.
* <p>
* It's good practice to explicitly release() the surface, preferably from a "finally" block.
*/
public class WindowSurface extends EglSurfaceBase {
private Surface mSurface;
private boolean mReleaseSurface;
/**
* Associates an EGL surface with the native window surface.
* <p>
* Set releaseSurface to true if you want the Surface to be released when release() is
* called. This is convenient, but can interfere with framework classes that expect to
* manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the
* surfaceDestroyed() callback won't fire).
*/
public WindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) {
super(eglCore);
createWindowSurface(surface);
mSurface = surface;
mReleaseSurface = releaseSurface;
}
/**
* Associates an EGL surface with the SurfaceTexture.
*/
public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) {
super(eglCore);
createWindowSurface(surfaceTexture);
}
/**
* Releases any resources associated with the EGL surface (and, if configured to do so,
* with the Surface as well).
* <p>
* Does not require that the surface's EGL context be current.
*/
public void release() {
releaseEglSurface();
if (mSurface != null) {
if (mReleaseSurface) {
mSurface.release();
}
mSurface = null;
}
}
/**
* Recreate the EGLSurface, using the new EglBase. The caller should have already
* freed the old EGLSurface with releaseEglSurface().
* <p>
* This is useful when we want to update the EGLSurface associated with a Surface.
* For example, if we want to share with a different EGLContext, which can only
* be done by tearing down and recreating the context. (That's handled by the caller;
* this just creates a new EGLSurface for the Surface we were handed earlier.)
* <p>
* If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
* context somewhere, the create call will fail with complaints from the Surface
* about already being connected.
*/
public void recreate(EglCore newEglCore) {
if (mSurface == null) {
throw new RuntimeException("not yet implemented for SurfaceTexture");
}
mEglCore = newEglCore; // switch to new context
createWindowSurface(mSurface); // create new surface
}
}
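
/*
 * Hedged usage sketch, not part of the original Grafika sources: moving an existing Surface onto a
 * new EGL context with recreate(), as the comment above describes. The old EGLSurface has to be
 * released first; the android.view.Surface itself stays alive because release() is never called here.
 */
final class WindowSurfaceRecreateDemo {
    static void moveToNewContext(WindowSurface window, EglCore oldCore, EglCore newCore) {
        window.releaseEglSurface();   // drop the old EGLSurface, keep the underlying Surface
        oldCore.release();            // tear down the old context
        window.recreate(newCore);     // bind the same Surface to the new context
        window.makeCurrent();
    }
}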

@ -1,190 +0,0 @@
package com.yinuo.library.vlc.encoder;
import android.content.Context;
import android.content.res.Configuration;
import android.opengl.GLES20;
import android.opengl.Matrix;
import com.yinuo.library.vlc.R;
import com.yinuo.library.vlc.ShaderProgram;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import static android.opengl.GLES20.GL_CLAMP_TO_EDGE;
import static android.opengl.GLES20.GL_FLOAT;
import static android.opengl.GLES20.GL_LINEAR;
import static android.opengl.GLES20.GL_LUMINANCE;
import static android.opengl.GLES20.GL_LUMINANCE_ALPHA;
import static android.opengl.GLES20.GL_TEXTURE0;
import static android.opengl.GLES20.GL_TEXTURE1;
import static android.opengl.GLES20.GL_TEXTURE_2D;
import static android.opengl.GLES20.GL_TEXTURE_MAG_FILTER;
import static android.opengl.GLES20.GL_TEXTURE_MIN_FILTER;
import static android.opengl.GLES20.GL_TEXTURE_WRAP_S;
import static android.opengl.GLES20.GL_TEXTURE_WRAP_T;
import static android.opengl.GLES20.GL_TRIANGLE_STRIP;
import static android.opengl.GLES20.GL_UNSIGNED_BYTE;
import static android.opengl.GLES20.glActiveTexture;
import static android.opengl.GLES20.glBindTexture;
import static android.opengl.GLES20.glDrawArrays;
import static android.opengl.GLES20.glEnableVertexAttribArray;
import static android.opengl.GLES20.glGenTextures;
import static android.opengl.GLES20.glGetAttribLocation;
import static android.opengl.GLES20.glGetUniformLocation;
import static android.opengl.GLES20.glTexImage2D;
import static android.opengl.GLES20.glTexParameterf;
import static android.opengl.GLES20.glUniform1i;
import static android.opengl.GLES20.glVertexAttribPointer;
/**
* Created by liwentian on 17/8/16.
*/
/**
* Renders NV21 (YUV420SP) frames from the camera: a full-resolution Y plane plus an
* interleaved VU plane, uploaded as two GL textures.
*/
public class YUVProgram extends ShaderProgram {
protected final int mUniformYTextureLocation;
protected final int mUniformUVTextureLocation;
static final float CUBE[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
public static final float TEXTURE_UPSIDE_DOWN[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
public static final float TEXTURE_NO_ROTATION[] = {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
};
// Attribute locations
private final int aPositionLocation;
private final int aTextureCoordinatesLocation;
private final int uMVPMatrixLocation;
private final FloatBuffer mGLCubeBuffer;
private final FloatBuffer mGLTextureBuffer;
private int mYTextureId, mUVTextureId;
private ByteBuffer mYBuffer, mUVBuffer;
public YUVProgram(Context context, int width, int height) {
super(context, R.raw.yuv_vertex, R.raw.yuv_fragment, width, height);
mUniformYTextureLocation = glGetUniformLocation(program, "y_texture");
mUniformUVTextureLocation = glGetUniformLocation(program, "uv_texture");
uMVPMatrixLocation = glGetUniformLocation(program, "uMVPMatrix");
aPositionLocation = glGetAttribLocation(program, "a_Position");
aTextureCoordinatesLocation = glGetAttribLocation(program, "a_TextureCoordinates");
mGLCubeBuffer = ByteBuffer.allocateDirect(CUBE.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer = ByteBuffer.allocateDirect(TEXTURE_NO_ROTATION.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mYBuffer = ByteBuffer.allocateDirect(width * height)
.order(ByteOrder.nativeOrder());
mUVBuffer = ByteBuffer.allocateDirect(width * height / 2)
.order(ByteOrder.nativeOrder());
int[] textures = new int[2];
glGenTextures(2, textures, 0);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textures[0]);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
mYTextureId = textures[0];
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, textures[1]);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
mUVTextureId = textures[1];
mGLCubeBuffer.clear();
mGLCubeBuffer.put(CUBE).position(0);
mGLTextureBuffer.clear();
mGLTextureBuffer.put(TEXTURE_NO_ROTATION).position(0);
}
public void setUpsideDown() {
mGLTextureBuffer.clear();
mGLTextureBuffer.put(TEXTURE_UPSIDE_DOWN).position(0);
}
public void setUniforms(byte[] data) {
mYBuffer.position(0);
mYBuffer.put(data, 0, width * height);
mUVBuffer.position(0);
mUVBuffer.put(data, width * height, width * height / 2);
mYBuffer.position(0);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, mYTextureId);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height,
0, GL_LUMINANCE, GL_UNSIGNED_BYTE, mYBuffer);
glUniform1i(mUniformYTextureLocation, 0);
GlUtil.checkGlError("init YTexture");
mUVBuffer.position(0);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, mUVTextureId);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, width / 2, height / 2,
0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, mUVBuffer);
glUniform1i(mUniformUVTextureLocation, 1);
float[] matrix = new float[16];
Matrix.setIdentityM(matrix, 0);
int orientation = context.getResources().getConfiguration().orientation;
int degrees = orientation == Configuration.ORIENTATION_LANDSCAPE ? 0 : -90;
Matrix.rotateM(matrix, 0, degrees, 0.0f, 0.0f, 1.0f);
GLES20.glUniformMatrix4fv(uMVPMatrixLocation, 1, false, matrix, 0);
GlUtil.checkGlError("init UVTexture");
mGLCubeBuffer.position(0);
glVertexAttribPointer(aPositionLocation, 2, GL_FLOAT, false, 0, mGLCubeBuffer);
glEnableVertexAttribArray(aPositionLocation);
mGLTextureBuffer.position(0);
glVertexAttribPointer(aTextureCoordinatesLocation, 2, GL_FLOAT, false, 0, mGLTextureBuffer);
glEnableVertexAttribArray(aTextureCoordinatesLocation);
}
public void draw() {
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
}
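
/*
 * Hedged sketch, not part of the original file: the buffer split in setUniforms() assumes the NV21
 * (YUV420SP) layout - a full-resolution Y plane followed by an interleaved VU plane of half that
 * size. The helper below only computes those sizes; names and the 1920x1080 example are illustrative.
 */
final class Nv21LayoutDemo {
    static int ySize(int width, int height)     { return width * height; }
    static int vuSize(int width, int height)    { return width * height / 2; }
    static int frameSize(int width, int height) { return width * height * 3 / 2; }

    // For 1920x1080: Y = 2,073,600 bytes, VU = 1,036,800 bytes, total = 3,110,400 bytes.
}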

@ -1,36 +0,0 @@
package com.yinuo.library.vlc.utils;
import android.graphics.Bitmap;
import android.graphics.Canvas;
public class BitmapUtil {
/**
* Draws frontBitmap on top of backBitmap and returns the merged result.
*
* @param backBitmap  the background bitmap; it is recycled after a successful merge
* @param frontBitmap the foreground bitmap drawn on top
* @return a new merged bitmap, or backBitmap unchanged if either input is null or recycled
*/
public static Bitmap mergeBitmap(Bitmap backBitmap, Bitmap frontBitmap) {
if (backBitmap == null || backBitmap.isRecycled()
|| frontBitmap == null || frontBitmap.isRecycled()) {
return backBitmap;
}
// create a new blank bitmap with the same width and height as the background
Bitmap newbmp = Bitmap.createBitmap(backBitmap.getWidth(), backBitmap.getHeight(), Bitmap.Config.ARGB_8888);
Canvas cv = new Canvas(newbmp);
// draw the background starting at (0, 0)
cv.drawBitmap(backBitmap, 0, 0, null);
// draw the foreground at (100, 100); any offset can be used here
cv.drawBitmap(frontBitmap, 100, 100, null);
// save the canvas state
cv.save();
// restore the canvas state
cv.restore();
// release the background bitmap's memory
backBitmap.recycle();
return newbmp;
}
}

@ -1,61 +0,0 @@
package com.yinuo.library.vlc.utils;
import android.util.Log;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
/**
* Created by liwentian on 17/8/16.
*/
public class LogUtils {
private static final String TAG = "watcher";
public static void v(String msg) {
Log.v(TAG, msg);
}
public static void v(String tag, String msg) {
Log.v(tag, msg);
}
public static void e(String msg) {
Log.e(TAG, msg);
}
public static void e(String tag, String msg) {
Log.e(tag, msg);
}
public static void w(String msg) {
Log.w(TAG, msg);
}
public static void w(String tag, String msg) {
Log.w(tag, msg);
}
public static void e(Throwable e) {
String s = getThrowableString(e);
e(s);
}
private static String getThrowableString(Throwable e) {
Writer writer = new StringWriter();
PrintWriter printWriter = new PrintWriter(writer);
while (e != null) {
e.printStackTrace(printWriter);
e = e.getCause();
}
String text = writer.toString();
printWriter.close();
return text;
}
}

@ -1,7 +0,0 @@
precision mediump float;
varying vec4 v_Color;
void main() {
gl_FragColor = v_Color;
}

@ -1,11 +0,0 @@
attribute vec4 a_Position;
attribute vec4 a_Color;
varying vec4 v_Color;
uniform mat4 u_Matrix;
void main() {
v_Color = a_Color;
gl_Position = u_Matrix * a_Position;
}

@ -1,15 +0,0 @@
precision mediump float;
varying vec2 v_TextureCoordinates;
uniform sampler2D s_texture;
void main() {
float r, g, b;
r = texture2D(s_texture, v_TextureCoordinates).r;
g = texture2D(s_texture, v_TextureCoordinates).g;
b = texture2D(s_texture, v_TextureCoordinates).b;
gl_FragColor = vec4(r, g, b, 1.0);
}

@ -1,11 +0,0 @@
attribute vec4 a_Position;
attribute vec2 a_TextureCoordinates;
varying vec2 v_TextureCoordinates;
uniform mat4 u_Matrix;
void main() {
v_TextureCoordinates = a_TextureCoordinates;
gl_Position = u_Matrix * a_Position;
}

@ -1,14 +0,0 @@
precision mediump float;
varying vec2 v_TextureCoordinates;
uniform sampler2D s_texture;
void main() {
gl_FragColor = texture2D(s_texture, v_TextureCoordinates);
// float r = texture2D(s_texture, v_TextureCoordinates).r;
// float g = texture2D(s_texture, v_TextureCoordinates).g;
// float b = texture2D(s_texture, v_TextureCoordinates).b;
// gl_FragColor = vec4(1.0, g, b, 1.0);
}

@ -1,9 +0,0 @@
attribute vec4 a_Position;
attribute vec2 a_TextureCoordinates;
varying vec2 v_TextureCoordinates;
void main() {
v_TextureCoordinates = a_TextureCoordinates;
gl_Position = a_Position;
}

@ -1,20 +0,0 @@
precision mediump float;
varying vec2 v_TextureCoordinates;
uniform sampler2D y_texture;
uniform sampler2D uv_texture;
void main() {
float r, g, b, y, u, v;
y = texture2D(y_texture, v_TextureCoordinates).r;
u = texture2D(uv_texture, v_TextureCoordinates).a - 0.5;
v = texture2D(uv_texture, v_TextureCoordinates).r - 0.5;
r = y + 1.13983 * v;
g = y - 0.39465 * u - 0.58060 * v;
b = y + 2.03211 * u;
gl_FragColor = vec4(r, g, b, 1.0);
}

@ -1,11 +0,0 @@
attribute vec4 a_Position;
attribute vec2 a_TextureCoordinates;
uniform mat4 uMVPMatrix;
varying vec2 v_TextureCoordinates;
void main() {
v_TextureCoordinates = a_TextureCoordinates;
gl_Position = uMVPMatrix * a_Position;
}

@ -1,3 +0,0 @@
<resources>
<string name="app_name">library-vlc</string>
</resources>