desc: Render video with a SurfaceView

Switches the home preview from GLSurfaceView/RtspSurfaceRender to a plain SurfaceView driven by the new RtspSurfaceRender2, which draws decoded RTSP frames onto a Canvas, composites a text overlay, and hands the merged frames to the recording encoder.

main
xiaowusky 2 years ago
parent be4c2b9c4f
commit 4d78e48101

@@ -1,11 +1,10 @@
package com.yinuo.safetywatcher.watcher.ui
import android.content.Intent
import android.opengl.GLSurfaceView
import android.os.Build
import android.view.View
import androidx.annotation.RequiresApi
import com.yinuo.library.vlc.RtspSurfaceRender
import com.yinuo.library.vlc.RtspSurfaceRender2
import com.yinuo.safetywatcher.databinding.ActivityHomeBinding
import com.yinuo.safetywatcher.watcher.base.NoOptionsActivity
import com.yinuo.safetywatcher.watcher.constant.CAMERA_URL
@@ -20,6 +19,8 @@ class HomeActivity : NoOptionsActivity() {
ActivityHomeBinding.inflate(layoutInflater)
}
private var mRender: RtspSurfaceRender2? = null
override fun getTopBarTitle(): String? {
return null;
}
@@ -46,7 +47,9 @@ class HomeActivity : NoOptionsActivity() {
itemRecovery.setOnClickListener {
mBinding.tvWarn.visibility = View.GONE
}
cameraSwitch.setOnCheckedChangeListener { buttonView, isChecked -> }
cameraSwitch.setOnCheckedChangeListener { buttonView, isChecked ->
if (isChecked) mRender?.startRecording() else mRender?.stopRecording()
}
itemSetting.post {
itemSetting.requestFocus()
@@ -55,11 +58,14 @@ class HomeActivity : NoOptionsActivity() {
}
private fun setForCamera() {
mBinding.surface.setEGLContextClientVersion(3);
val mRender = RtspSurfaceRender(mBinding.surface)
mRender.setRtspUrl(CAMERA_URL)
mBinding.surface.setRenderer(mRender)
mBinding.surface.renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
// mBinding.surface.setEGLContextClientVersion(3);
// val mRender = RtspSurfaceRender(mBinding.surface)
// mRender.setRtspUrl(CAMERA_URL)
// mBinding.surface.setRenderer(mRender)
// mBinding.surface.renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
mRender = RtspSurfaceRender2(mBinding.surface2)
mRender?.setRtspUrl(CAMERA_URL)
}
private fun initTopbarHelper() {
@@ -78,11 +84,11 @@ class HomeActivity : NoOptionsActivity() {
override fun onResume() {
super.onResume()
mBinding.surface.onResume()
// mBinding.surface.onResume()
}
override fun onStop() {
super.onStop()
mBinding.surface.onPause()
// mBinding.surface.onPause()
}
}

@@ -6,6 +6,12 @@
<android.opengl.GLSurfaceView
android:id="@+id/surface"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:visibility="gone"/>
<SurfaceView
android:id="@+id/surface2"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<LinearLayout

@@ -12,5 +12,6 @@ dependencies {
implementation(name: 'libvlc-3.0.0', ext: 'aar')
implementation project(path: ':library-common')
implementation project(path: ':library-push')
}

@@ -0,0 +1,162 @@
package com.yinuo.library.vlc;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.opengl.EGL14;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.yinuo.library.vlc.encoder.BaseMovieEncoder;
import com.yinuo.library.vlc.encoder.CameraHelper;
import com.yinuo.library.vlc.encoder.MovieEncoder1;
import com.yinuo.library.vlc.utils.LogUtils;
import java.io.File;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Created by liwentian on 2017/10/12.
*/
public class RtspSurfaceRender2 implements RtspHelper.RtspCallback {
private ByteBuffer mBuffer;
private SurfaceView mSurfaceView;
private String mRtspUrl;
private BaseMovieEncoder mVideoEncoder;
Bitmap videoBitmap = null;
Bitmap overLayBitmap = null;
TxtOverlay overlay = null;
// Dedicated lock: videoBitmap is reassigned from the preview callback, so
// synchronizing on the bitmap instance itself would not be safe.
private final Object mBitmapLock = new Object();
// mSurfaceView render thread: draws the latest merged frame onto the SurfaceView.
Thread renderThread = new Thread(new Runnable() {
    @Override
    public void run() {
        while (!Thread.currentThread().isInterrupted()) {
            Canvas canvas = mSurfaceView.getHolder().lockCanvas();
            if (canvas != null) {
                synchronized (mBitmapLock) {
                    if (videoBitmap != null) {
                        canvas.drawBitmap(videoBitmap, null, new Rect(0, 0, canvas.getWidth(), canvas.getHeight()), null);
                    }
                }
                mSurfaceView.getHolder().unlockCanvasAndPost(canvas);
            }
            try {
                // Sleep on every iteration so the loop never busy-spins while no frame is available.
                Thread.sleep(20);
            } catch (InterruptedException e) {
                // Exit cleanly when the surface is destroyed.
                Thread.currentThread().interrupt();
                return;
            }
        }
    }
});
public RtspSurfaceRender2(SurfaceView surfaceView) {
mSurfaceView = surfaceView;
SurfaceHolder holder = mSurfaceView.getHolder();
holder.addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
overlay = new TxtOverlay();
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    LogUtils.v(String.format("onSurfaceChanged: width = %d, height = %d", width, height));
    mBuffer = ByteBuffer.allocateDirect(width * height * 4).order(ByteOrder.nativeOrder());
    mVideoEncoder = new MovieEncoder1(mSurfaceView.getContext(), width, height);
    // surfaceChanged may fire more than once; only start the render thread the first time.
    if (!renderThread.isAlive()) {
        renderThread.start();
    }
    synchronized (mBitmapLock) {
        videoBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    }
    RtspHelper.getInstance().createPlayer(mRtspUrl, width, height, RtspSurfaceRender2.this);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    // Stop the render loop and release the native player along with the surface.
    onSurfaceDestroyed();
}
});
}
public void setRtspUrl(String url) {
mRtspUrl = url;
}
public void startRecording() {
mSurfaceView.post(() -> {
if (!mVideoEncoder.isRecording()) {
File output = CameraHelper.getOutputMediaFile(CameraHelper.MEDIA_TYPE_VIDEO, "");
LogUtils.v(String.format("startRecording: %s", output));
mVideoEncoder.startRecording(new BaseMovieEncoder.EncoderConfig(output, EGL14.eglGetCurrentContext()));
}
});
}
public void stopRecording() {
mSurfaceView.post(() -> {
if (mVideoEncoder.isRecording()) {
mVideoEncoder.stopRecording();
}
});
}
public void onSurfaceDestroyed() {
    renderThread.interrupt();
    RtspHelper.getInstance().releasePlayer();
}
@Override
public void onPreviewFrame(final ByteBuffer buffer, int width, int height) {
    ByteBuffer newBuffer = null;
    synchronized (mBitmapLock) {
        if (videoBitmap == null) {
            // Surface not ready yet; drop the frame.
            return;
        }
        overLayBitmap = overlay.javaOverlayBm("1111111@2222222@333333");
        buffer.position(0);
        videoBitmap.copyPixelsFromBuffer(buffer);
        videoBitmap = mergeBitmap(videoBitmap, overLayBitmap);
        // Heap buffer, not allocateDirect: a direct buffer has no backing array,
        // so calling array() on it would throw UnsupportedOperationException.
        newBuffer = ByteBuffer.allocate(videoBitmap.getByteCount()).order(ByteOrder.nativeOrder());
        videoBitmap.copyPixelsToBuffer(newBuffer);
    }
    final ByteBuffer finalNewBuffer = newBuffer;
    mSurfaceView.post(() -> {
        if (mVideoEncoder != null && finalNewBuffer != null) {
            mVideoEncoder.frameAvailable(finalNewBuffer.array(), System.nanoTime());
        }
    });
}
/**
 * Composites frontBitmap on top of backBitmap.
 *
 * @param backBitmap the decoded video frame (bottom layer)
 * @param frontBitmap the overlay (top layer)
 * @return a new merged bitmap, or backBitmap if either input is null or recycled
 */
public static Bitmap mergeBitmap(Bitmap backBitmap, Bitmap frontBitmap) {
if (backBitmap == null || backBitmap.isRecycled()
|| frontBitmap == null || frontBitmap.isRecycled()) {
return backBitmap;
}
// Create a new blank bitmap with the same width and height as the source
Bitmap newbmp = Bitmap.createBitmap(backBitmap.getWidth(), backBitmap.getHeight(), Bitmap.Config.ARGB_8888);
Canvas cv = new Canvas(newbmp);
// Draw the background frame at (0, 0)
cv.drawBitmap(backBitmap, 0, 0, null);
// Draw the overlay at (100, 100); any position works
cv.drawBitmap(frontBitmap, 100, 100, null);
return newbmp;
}
}
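For reference, a minimal usage sketch of the new renderer, in Java for consistency with the library code. The RTSP URL below is illustrative; in the app it comes from CAMERA_URL, and setRtspUrl must run before the surface is created because surfaceChanged reads mRtspUrl when it starts playback.

import android.view.SurfaceView;
import com.yinuo.library.vlc.RtspSurfaceRender2;

public class RendererUsageSketch {
    // Hypothetical URL for illustration only.
    static final String DEMO_URL = "rtsp://192.168.1.10:554/stream1";

    static RtspSurfaceRender2 attach(SurfaceView surfaceView) {
        // The constructor registers a SurfaceHolder.Callback; decoding and the
        // render thread start automatically once the surface reports its size.
        RtspSurfaceRender2 render = new RtspSurfaceRender2(surfaceView);
        render.setRtspUrl(DEMO_URL);
        return render;
    }

    static void toggleRecording(RtspSurfaceRender2 render, boolean on) {
        // Mirrors the cameraSwitch listener in HomeActivity.
        if (on) render.startRecording(); else render.stopRecording();
    }
}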

@@ -0,0 +1,44 @@
package com.yinuo.library.vlc;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.text.TextUtils;
import org.easydarwin.util.YUVUtils;
import java.text.SimpleDateFormat;
import java.util.Locale;
/**
* Created by John on 2017/2/23.
*/
public class TxtOverlay {
String mTip = "";
long lastTipUpdateTime = 0;
Bitmap bmp;
SimpleDateFormat dateFormat = new SimpleDateFormat("yy-MM-dd HH:mm:ss", Locale.getDefault());
public TxtOverlay() {
}
public Bitmap javaOverlayBm(String txt) {
if (TextUtils.isEmpty(txt)) {
return null;
}
long currentTimeMillis = System.currentTimeMillis();
// Throttle bitmap regeneration to keep the per-frame cost low: only re-render
// when the text changes or at most once per second (so the timestamp stays fresh)
if (TextUtils.isEmpty(mTip) || !txt.equals(mTip) || currentTimeMillis - lastTipUpdateTime > 1000) {
// Remember the update time and the latest text
lastTipUpdateTime = currentTimeMillis;
mTip = txt;
// Render "timestamp@text" into a bitmap
bmp = YUVUtils.generateBitmap(dateFormat.format(lastTipUpdateTime) + "@" + txt, 40, Color.WHITE);
// Optional scale/rotate step, currently disabled
// bmp = YUVUtils.rotateImage(bmp, 0);
}
return bmp;
}
}
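YUVUtils.generateBitmap is not part of this commit, so its behavior is an assumption here. As a rough mental model (a sketch, not the library's actual implementation), rendering a line of text into an ARGB bitmap looks like this:

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;

public class TextBitmapSketch {
    // Renders a single line of text into a transparent ARGB bitmap.
    public static Bitmap textToBitmap(String text, float textSizePx, int color) {
        Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
        paint.setTextSize(textSizePx);
        paint.setColor(color);
        Paint.FontMetricsInt fm = paint.getFontMetricsInt();
        int width = (int) Math.ceil(paint.measureText(text));
        int height = fm.bottom - fm.top;
        Bitmap bitmap = Bitmap.createBitmap(Math.max(width, 1), Math.max(height, 1), Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        // The baseline sits at -fm.top so ascenders are not clipped.
        canvas.drawText(text, 0, -fm.top, paint);
        return bitmap;
    }
}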

@@ -0,0 +1,214 @@
package com.yinuo.library.vlc.encoder;
import android.annotation.TargetApi;
import android.app.Activity;
import android.hardware.Camera;
import android.os.Build;
import android.os.Environment;
import android.view.Surface;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
/**
* Created by liwentian on 2017/8/29.
*/
public class CameraHelper {
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
public static int getFrontCameraId() {
int frontIdx = 0;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
frontIdx = i;
break;
}
}
return frontIdx;
}
public static int getDisplayOrientation(Activity activity, int cameraId) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else {
// back-facing
result = (info.orientation - degrees + 360) % 360;
}
return result;
}
/**
* Iterate over supported camera video sizes to see which one best fits the
* dimensions of the given view while maintaining the aspect ratio. If none can
* be found, be lenient with the aspect ratio.
*
* @param supportedVideoSizes Supported camera video sizes.
* @param previewSizes Supported camera preview sizes.
* @param w The width of the view.
* @param h The height of the view.
* @return Best match camera video size to fit in the view.
*/
public static Camera.Size getOptimalVideoSize(List<Camera.Size> supportedVideoSizes,
List<Camera.Size> previewSizes, int w, int h) {
// Tolerance when comparing aspect ratios; sizes outside it are rejected in the first pass.
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
// Supported video sizes list might be null, it means that we are allowed to use the preview
// sizes
List<Camera.Size> videoSizes;
if (supportedVideoSizes != null) {
videoSizes = supportedVideoSizes;
} else {
videoSizes = previewSizes;
}
Camera.Size optimalSize = null;
// Start with max value and refine as we iterate over available video sizes. This is the
// minimum difference between view and camera height.
double minDiff = Double.MAX_VALUE;
// Target view height
int targetHeight = h;
// Try to find a video size that matches aspect ratio and the target view size.
// Iterate over all available sizes and pick the largest size that can fit in the view and
// still maintain the aspect ratio.
for (Camera.Size size : videoSizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE)
continue;
if (Math.abs(size.height - targetHeight) < minDiff && previewSizes.contains(size)) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
// Cannot find video size that matches the aspect ratio, ignore the requirement
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Camera.Size size : videoSizes) {
if (Math.abs(size.height - targetHeight) < minDiff && previewSizes.contains(size)) {
optimalSize = size;
minDiff = Math.abs(size.height - targetHeight);
}
}
}
return optimalSize;
}
/**
* @return the front camera on the device, falling back to camera 0 if no front camera exists.
*/
public static Camera getDefaultCameraInstance() {
int front = getFrontCameraId();
return Camera.open(front);
}
/**
* @return the default rear/back facing camera on the device. Returns null if camera is not
* available.
*/
public static Camera getDefaultBackFacingCameraInstance() {
return getDefaultCamera(Camera.CameraInfo.CAMERA_FACING_BACK);
}
/**
* @return the default front facing camera on the device. Returns null if camera is not
* available.
*/
public static Camera getDefaultFrontFacingCameraInstance() {
return getDefaultCamera(Camera.CameraInfo.CAMERA_FACING_FRONT);
}
/**
* @param position Physical position of the camera, i.e. Camera.CameraInfo.CAMERA_FACING_FRONT
* or Camera.CameraInfo.CAMERA_FACING_BACK.
* @return the default camera on the device. Returns null if camera is not available.
*/
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
private static Camera getDefaultCamera(int position) {
// Find the total number of cameras available
int numberOfCameras = Camera.getNumberOfCameras();
// Find the first camera that matches the requested facing
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < numberOfCameras; i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == position) {
return Camera.open(i);
}
}
return null;
}
/**
* Creates a media file in the public "video" directory on external storage. The directory
* is persistent and available to other applications like the gallery.
*
* @param type Media type. Can be video or image.
* @param name Suffix appended to video file names.
* @return A file object pointing to the new file, or null if external storage is unavailable.
*/
public static File getOutputMediaFile(int type, String name) {
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
if (!Environment.getExternalStorageState().equalsIgnoreCase(Environment.MEDIA_MOUNTED)) {
return null;
}
File mediaStorageDir = Environment.getExternalStoragePublicDirectory("video");
if (!mediaStorageDir.exists() && !mediaStorageDir.mkdirs()) {
return null;
}
// Create a media file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.CHINA).format(new Date());
File mediaFile;
if (type == MEDIA_TYPE_IMAGE) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"IMG_" + timeStamp + ".jpg");
} else if (type == MEDIA_TYPE_VIDEO) {
mediaFile = new File(mediaStorageDir.getPath() + File.separator +
"VID_" + timeStamp + "_" + name + ".mp4");
} else {
return null;
}
return mediaFile;
}
}
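A short usage sketch for the helpers used by the recording path. getOutputMediaFile is called from RtspSurfaceRender2.startRecording with an empty name; the camera size selection below is illustrative (this commit's pipeline decodes RTSP rather than opening a local camera).

import android.hardware.Camera;
import com.yinuo.library.vlc.encoder.CameraHelper;
import java.io.File;
import java.util.List;

public class CameraHelperUsageSketch {
    @SuppressWarnings("deprecation") // android.hardware.Camera is the legacy API this helper targets
    static void example(int viewWidth, int viewHeight) {
        Camera camera = CameraHelper.getDefaultBackFacingCameraInstance();
        if (camera == null) return;
        Camera.Parameters params = camera.getParameters();
        List<Camera.Size> videoSizes = params.getSupportedVideoSizes();   // may be null on some devices
        List<Camera.Size> previewSizes = params.getSupportedPreviewSizes();
        // getOptimalVideoSize falls back to preview sizes when videoSizes is null.
        Camera.Size best = CameraHelper.getOptimalVideoSize(videoSizes, previewSizes, viewWidth, viewHeight);
        // Produces VID_<timestamp>_.mp4 under the public "video" directory, as in startRecording().
        File output = CameraHelper.getOutputMediaFile(CameraHelper.MEDIA_TYPE_VIDEO, "");
        camera.release();
    }
}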