desc: add the watermark by writing it into the YUV frame data

main
xiaowusky 1 year ago
parent 4738d911a3
commit 449ca7b28e

@@ -3,13 +3,12 @@ package com.yinuo.safetywatcher.watcher.ui
import android.content.Intent
import android.os.Build
import android.os.Process
import android.view.SurfaceHolder
import android.view.SurfaceHolder.Callback
import android.view.View
import androidx.annotation.RequiresApi
import androidx.lifecycle.lifecycleScope
import com.common.commonlib.db.DBUtils
import com.common.commonlib.utils.LogUtils
import com.common.commonlib.utils.MMKVUtils
import com.common.commonlib.utils.NetworkHelper
import com.yinuo.safetywatcher.R
import com.yinuo.safetywatcher.databinding.ActivityHomeBinding
@@ -99,14 +98,23 @@ class HomeActivity : NoOptionsActivity() {
}
}
private var lastUpdateTime = 0L
/**
* Set up the camera
*/
private fun setForCamera() {
mBinding.surface.holder.addCallback(object : Callback{
override fun surfaceCreated(holder: SurfaceHolder) {
mClient = EasyPlayerClient(this@HomeActivity, mBinding.surface.holder.surface, null) {
RecordHelper.onFrameAvailable(mBinding.surface)
mClient = EasyPlayerClient(
this@HomeActivity,
mBinding.surface,
null
) {
LogUtils.w("onI420Data c0")
RecordHelper.onFrameAvailable(mBinding.surface, it)
LogUtils.w("onI420Data c1")
val currentTimeMillis = System.currentTimeMillis()
if (currentTimeMillis - lastUpdateTime > 1000) {
lastUpdateTime = currentTimeMillis
if (!AppData.hasCameraData()) {
AppData.setCameraData(true)
changeViewStatus()
@@ -114,22 +122,9 @@ class HomeActivity : NoOptionsActivity() {
}
watchCamera(DELAY_TIME_CHECK_CAMERA)
}
mClient?.play(CAMERA_URL)
}
override fun surfaceChanged(
holder: SurfaceHolder,
format: Int,
width: Int,
height: Int
) {
}
override fun surfaceDestroyed(holder: SurfaceHolder) {
mClient?.stop()
LogUtils.w("onI420Data c2")
}
})
mClient?.play(CAMERA_URL)
// The first connection is slow, so allow 10 seconds
watchCamera(DELAY_TIME_OPEN_CAMERA)
showLoadingDialog(R.string.connecting_camera)

@@ -1,92 +1,65 @@
package com.yinuo.safetywatcher.watcher.utils
import android.graphics.Bitmap
import android.opengl.EGL14
import android.os.Handler
import android.os.HandlerThread
import android.view.PixelCopy
import android.view.PixelCopy.OnPixelCopyFinishedListener
import android.view.SurfaceView
import com.common.commonlib.CommonApplication
import com.common.commonlib.utils.BitmapUtils
import com.yinuo.library.vlc.encoder.BaseMovieEncoder.EncoderConfig
import com.yinuo.library.vlc.encoder.MovieEncoder1
import android.view.TextureView
import com.common.commonlib.utils.LogUtils
import com.yinuo.library.vlc.encoder.MediaCodecManager
import org.easydarwin.TxtOverlay
import java.nio.ByteBuffer
object RecordHelper {
private val mVideoEncoder: MovieEncoder1
private val codecManager: MediaCodecManager = MediaCodecManager.getInstance()
private const val width = 1920
private const val height = 1080
val utils by lazy {
NV21ToBitmap(CommonApplication.getContext())
}
private val workHandler by lazy {
val mHandlerThread = HandlerThread("recordAndEncode")
mHandlerThread.start()
Handler(mHandlerThread.looper)
}
private var recording = false
init {
mVideoEncoder = MovieEncoder1(CommonApplication.getContext(), width, height, true)
codecManager.initCodecManager(width, height, 0)
}
fun onFrameAvailable(view: SurfaceView) {
if (!mVideoEncoder.isRecording) {
fun onFrameAvailable(view: TextureView, nv12Data: ByteArray) {
if (!recording) {
return
}
// workHandler.post {
// val nanoTime = System.nanoTime()
// var bitmap = view.bitmap
// bitmap?.let {
// val overLayBitmap: Bitmap? = TxtOverlay.getOverlayBitmap()
// overLayBitmap?.let {
// bitmap = BitmapUtils.mergeBitmap(bitmap!!, overLayBitmap)
// }
// }
// val buffer = ByteBuffer.allocate(bitmap!!.getByteCount())
// bitmap!!.copyPixelsToBuffer(buffer)
// bitmap!!.recycle()
// mVideoEncoder.frameAvailable(buffer.array(), nanoTime)
// }
workHandler.post {
var bitmap = Bitmap.createBitmap(
width,
height,
Bitmap.Config.ARGB_8888
)
PixelCopy.request(
view.holder.surface, bitmap, { copyResult ->
val nanoTime = System.nanoTime()
if (copyResult == PixelCopy.SUCCESS) {
bitmap?.let {
LogUtils.w("onI420Data 11111")
val overLayBitmap: Bitmap? = TxtOverlay.getOverlayBitmap()
overLayBitmap?.let {
bitmap = BitmapUtils.mergeBitmap(bitmap!!, overLayBitmap)
val yuv = TxtOverlay.getOverlayYuv()
yuv?.let {
// Stamp the overlay's Y plane into rows 100..(100 + overlay height) of the frame
for ((j, i) in (100 until overLayBitmap.height + 100).withIndex()) {
for (c in 0 until overLayBitmap.width) {
// Mask to unsigned before comparing; skip the background bytes
// (0x10, 0x80, 0xEB) to drop the PNG watermark's black fringe
val luma = yuv[j * overLayBitmap.width + c].toInt() and 0xFF
if (luma != 0x10 && luma != 0x80 && luma != 0xEB) {
// Write the single luma byte into the NV12 Y plane at x-offset 100
nv12Data[i * 1920 + 100 + c] = yuv[j * overLayBitmap.width + c]
}
}
}
val buffer = ByteBuffer.allocate(bitmap!!.byteCount)
bitmap!!.copyPixelsToBuffer(buffer)
bitmap!!.recycle()
mVideoEncoder.frameAvailable(buffer.array(), nanoTime)
}
}, workHandler
)
}
codecManager.addFrameData(nv12Data)
LogUtils.w("onI420Data 44444")
}
fun startRecording() {
if (!mVideoEncoder.isRecording) {
mVideoEncoder.startRecording(EncoderConfig(EGL14.eglGetCurrentContext()))
if (!recording) {
recording = true
codecManager.startMediaCodec()
}
}
fun stopRecording() {
if (mVideoEncoder.isRecording) {
mVideoEncoder.stopRecording()
if (recording) {
recording = false
codecManager.pauseMediaCodec()
}
}
}
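As a reference for the stamping loop above, here is a self-contained sketch of the same technique with bounds checks added. The (100, 100) origin and the background byte values 0x10/0x80/0xEB come from the diff; the function name and parameter list are illustrative only, not part of the commit:

// Sketch: copy an overlay's luma (Y) plane into an NV12 frame's Y plane.
// Chroma bytes are left untouched, so the watermark renders in grayscale.
fun stampOverlayY(
    nv12: ByteArray,      // full NV12 frame: frameW * frameH * 3 / 2 bytes
    frameW: Int,
    frameH: Int,
    overlayY: ByteArray,  // overlay Y plane: overlayW * overlayH bytes
    overlayW: Int,
    overlayH: Int,
    originX: Int = 100,
    originY: Int = 100
) {
    for (row in 0 until overlayH) {
        val dstRow = originY + row
        if (dstRow >= frameH) break
        for (col in 0 until overlayW) {
            val dstCol = originX + col
            if (dstCol >= frameW) break
            // Mask to unsigned; skip the overlay's background bytes so the
            // PNG's black fringe stays transparent (values from the diff).
            val luma = overlayY[row * overlayW + col].toInt() and 0xFF
            if (luma != 0x10 && luma != 0x80 && luma != 0xEB) {
                nv12[dstRow * frameW + dstCol] = overlayY[row * overlayW + col]
            }
        }
    }
}

The diff itself hard-codes the 1920-byte stride and assumes the overlay always fits inside the 1920x1080 frame.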

@@ -1,6 +1,17 @@
package com.yinuo.safetywatcher.watcher.utils;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import java.io.ByteArrayOutputStream;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.IntBuffer;
public class SnUtils {
public static String getDeviceSN() {
@@ -14,4 +25,394 @@ public class SnUtils {
}
return serial;
}
public static int[] yuvI420toARGB(byte[] i420, int width, int height) {
int framesize = width * height;
int[] rgb = new int[framesize]; // output pixel buffer
for (int i = 0; i < framesize; i++) {
int index = (i / width) / 2 * (width / 2) + (i % width) / 2;
int y = i420[i] & 0x000000ff; // Y component: one byte, promoted to int
int u = i420[framesize + index] & 0x000000ff; // U component
int v = i420[framesize + framesize / 4 + index] & 0x000000ff; // V component
/* YUV -> RGB conversion formulas */
int b = (int) (y + 1.8556 * (u - 128));
int g = (int) (y - (0.4681 * (v - 128) + 0.1872 * (u - 128)));
int r = (int) (y + 1.5748 * (v - 128));
/* clamp to [0, 255] */
b = (b > 255) ? 255 : ((b < 0) ? 0 : b);
g = (g > 255) ? 255 : ((g < 0) ? 0 : g);
r = (r > 255) ? 255 : ((r < 0) ? 0 : r);
rgb[i] = (0xff000000) | (0x00ff0000 & r << 16) | (0x0000ff00 & g << 8) | (0x000000ff & b);
}
return rgb;
}
static byte[] intArrToByteArrLittleEndianBySystemApi(int[] array) {
ByteBuffer byteBuffer = ByteBuffer.allocate(array.length * 4);
byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
IntBuffer intBuffer = byteBuffer.asIntBuffer();
intBuffer.put(array);
return byteBuffer.array();
}
static byte[] intArrToByteArrBigEndianBySystemApi(int[] array) {
ByteBuffer byteBuffer = ByteBuffer.allocate(array.length * 4);
byteBuffer.order(ByteOrder.BIG_ENDIAN);
IntBuffer intBuffer = byteBuffer.asIntBuffer();
intBuffer.put(array);
return byteBuffer.array();
}
static byte[] yuv = new byte[1920 * 1080 * 3 / 2]; // shared scratch buffer; assumes frames no larger than 1920x1080
public static byte[] rgb2YCbCr420(int[] pixels, int width, int height) {
int len = width * height;
// YUV420 buffer layout: Y takes len bytes, U and V take len/4 bytes each
int y, u, v;
for (int i = 0; i < height; i++) {
for (int j = 0; j < width; j++) {
// Mask off the ARGB alpha channel
int rgb = pixels[i * width + j] & 0x00FFFFFF;
// The pixel's channel order here is BGR; extract with shifts
int r = rgb & 0xFF;
int g = (rgb >> 8) & 0xFF;
int b = (rgb >> 16) & 0xFF;
// Apply the RGB -> YUV conversion formulas
y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;
// Clamp to the valid ranges
y = y < 16 ? 16 : (y > 255 ? 255 : y);
u = u < 0 ? 0 : (u > 255 ? 255 : u);
v = v < 0 ? 0 : (v > 255 ? 255 : v);
// Write Y, then the interleaved U/V pair (semi-planar layout)
yuv[i * width + j] = (byte) y;
yuv[len + (i >> 1) * width + (j & ~1) + 0] = (byte) u;
yuv[len + (i >> 1) * width + (j & ~1) + 1] = (byte) v;
}
}
return yuv;
}
public static void nv21ToRgba(byte[] input, int width, int height, byte[] output, boolean isRGB) {
int nvOff = width * height;
int i, j, yIndex = 0;
int y, u, v;
int r, g, b, nvIndex = 0;
// Walk every pixel and compute its r, g, b values.
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++, ++yIndex) {
// For the pixel at (i, j), look up its y, u, v values
nvIndex = (i / 2) * width + j - j % 2;
y = input[yIndex] & 0xff;
u = input[nvOff + nvIndex] & 0xff;
v = input[nvOff + nvIndex + 1] & 0xff;
// Compute r, g, b for pixel (i, j) from its yuv values
r = y + ((351 * (v - 128)) >> 8); //r
g = y - ((179 * (v - 128) + 86 * (u - 128)) >> 8); //g
b = y + ((443 * (u - 128)) >> 8); //b
// The rgb values are bytes in the range 0..255; clamp out-of-range results.
r = ((r > 255) ? 255 : (r < 0) ? 0 : r);
g = ((g > 255) ? 255 : (g < 0) ? 0 : g);
b = ((b > 255) ? 255 : (b < 0) ? 0 : b);
// Write the rgba/bgra result; the alpha channel (opacity) is set to 255 here, adjust for your own use case
// RGBA and BGRA differ only in the order of r, g, b, so a boolean flag selects which layout is produced
if (isRGB) {
output[yIndex * 4 + 0] = (byte) b;
output[yIndex * 4 + 1] = (byte) g;
output[yIndex * 4 + 2] = (byte) r;
output[yIndex * 4 + 3] = (byte) 255;
} else {
output[yIndex * 4 + 0] = (byte) r;
output[yIndex * 4 + 1] = (byte) g;
output[yIndex * 4 + 2] = (byte) b;
output[yIndex * 4 + 3] = (byte) 255;
}
}
}
}
public static Bitmap nv12ToBitmap2(byte[] nv12Data, int width, int height) {
int[] rgbaData = new int[width * height];
int frameSize = width * height;
int yOffset = 0;
int uvOffset = frameSize;
for (int j = 0; j < height; j++) {
int rgbIndex = j * width;
int uvIndex = uvOffset + (j >> 1) * width;
for (int i = 0; i < width; i++) {
int y = nv12Data[yOffset + i] & 0xFF;
int uv = (nv12Data[uvIndex + (i & ~1)] & 0xFF) - 128;
int v = (nv12Data[uvIndex + (i & ~1) + 1] & 0xFF) - 128;
int r = (int) (y + 1.402 * v);
int g = (int) (y - 0.344136 * uv - 0.714136 * v);
int b = (int) (y + 1.772 * uv);
r = Math.max(0, Math.min(r, 255));
g = Math.max(0, Math.min(g, 255));
b = Math.max(0, Math.min(b, 255));
rgbaData[rgbIndex++] = Color.argb(255, r, g, b);
}
yOffset += width;
}
return Bitmap.createBitmap(rgbaData, width, height, Bitmap.Config.ARGB_8888);
}
public static Bitmap nv12ToBitmap(byte[] data, int w, int h) {
return spToBitmap(data, w, h, 0, 1);
}
public static Bitmap nv21ToBitmap(byte[] data, int w, int h) {
return spToBitmap(data, w, h, 1, 0);
}
static int[] colors = null;
private static Bitmap spToBitmap(byte[] data, int w, int h, int uOff, int vOff) {
int plane = w * h;
if (colors == null){
colors = new int[plane];
}
int yPos = 0, uvPos = plane;
for (int j = 0; j < h; j++) {
for (int i = 0; i < w; i++) {
// YUV byte to RGB int
final int y1 = data[yPos] & 0xff;
final int u = (data[uvPos + uOff] & 0xff) - 128;
final int v = (data[uvPos + vOff] & 0xff) - 128;
final int y1192 = 1192 * y1;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
r = (r < 0) ? 0 : ((r > 262143) ? 262143 : r);
g = (g < 0) ? 0 : ((g > 262143) ? 262143 : g);
b = (b < 0) ? 0 : ((b > 262143) ? 262143 : b);
colors[yPos] = ((r << 6) & 0xff0000) |
((g >> 2) & 0xff00) |
((b >> 10) & 0xff);
if ((yPos++ & 1) == 1) uvPos += 2;
}
if ((j & 1) == 0) uvPos -= w;
}
return Bitmap.createBitmap(colors, w, h, Bitmap.Config.RGB_565);
}
public static Bitmap i420ToBitmap(byte[] data, int w, int h) {
return pToBitmap(data, w, h, true);
}
public static Bitmap yv12ToBitmap(byte[] data, int w, int h) {
return pToBitmap(data, w, h, false);
}
private static Bitmap pToBitmap(byte[] data, int w, int h, boolean uv) {
int plane = w * h;
int[] colors = new int[plane];
int off = plane >> 2;
int yPos = 0, uPos = plane + (uv ? 0 : off), vPos = plane + (uv ? off : 0);
for (int j = 0; j < h; j++) {
for (int i = 0; i < w; i++) {
// YUV byte to RGB int
final int y1 = data[yPos] & 0xff;
final int u = (data[uPos] & 0xff) - 128;
final int v = (data[vPos] & 0xff) - 128;
final int y1192 = 1192 * y1;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
r = (r < 0) ? 0 : ((r > 262143) ? 262143 : r);
g = (g < 0) ? 0 : ((g > 262143) ? 262143 : g);
b = (b < 0) ? 0 : ((b > 262143) ? 262143 : b);
colors[yPos] = ((r << 6) & 0xff0000) |
((g >> 2) & 0xff00) |
((b >> 10) & 0xff);
if ((yPos++ & 1) == 1) {
uPos++;
vPos++;
}
}
if ((j & 1) == 0) {
uPos -= (w >> 1);
vPos -= (w >> 1);
}
}
return Bitmap.createBitmap(colors, w, h, Bitmap.Config.RGB_565);
// return Bitmap.createBitmap(colors, w, h, Bitmap.Config.ARGB_8888);
}
// public static byte[] nv12ToRgba(byte[] nv12Data, int width, int height) {
// int nv12Size = width * height * 12;
// byte[] rgbaData = new byte[nv12Size + 4 * (width * height)];
//
// int offset = 0;
// for (int y = 0; y < height; y++) {
// for (int x = 0; x < width; x++) {
// int nv12Offset = (y * width + x) * 12;
// int yValue = (nv12Data[nv12Offset] & 0xFF) << 8 | (nv12Data[nv12Offset + 1] & 0xFF);
// int uValue = (nv12Data[nv12Offset + 2] & 0xFF) << 8 | (nv12Data[nv12Offset + 3] & 0xFF);
// int vValue = (nv12Data[nv12Offset + 4] & 0xFF) << 8 | (nv12Data[nv12Offset + 5] & 0xFF);
//
// int r = (yValue + 16) >> 16;
// int g = ((66 * uValue + 128) >> 16) + r;
// int b = ((49 * vValue + 128) >> 16) + g;
// int a = 255;
//
// rgbaData[offset++] = (byte) r;
// rgbaData[offset++] = (byte) g;
// rgbaData[offset++] = (byte) b;
// rgbaData[offset++] = (byte) a;
// }
// }
//
// return rgbaData;
// }
public static byte[] nv12ToRGBA(byte[] nv12Data, int width, int height) {
byte[] rgbaData = new byte[width * height * 4];
int frameSize = width * height;
int nv12Index = 0;
int rgbaIndex = 0;
int yOffset = 0;
int uvOffset = frameSize;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
int y = (nv12Data[yOffset + i] & 0xFF);
int uvIndex = uvOffset + (i & ~1) + (j >> 1) * width;
int u = (nv12Data[uvIndex] & 0xFF) - 128;
int v = (nv12Data[uvIndex + 1] & 0xFF) - 128;
int r = (int) (y + 1.402 * v);
int g = (int) (y - 0.344136 * u - 0.714136 * v);
int b = (int) (y + 1.772 * u);
r = Math.max(0, Math.min(r, 255));
g = Math.max(0, Math.min(g, 255));
b = Math.max(0, Math.min(b, 255));
rgbaData[rgbaIndex++] = (byte) r;
rgbaData[rgbaIndex++] = (byte) g;
rgbaData[rgbaIndex++] = (byte) b;
rgbaData[rgbaIndex++] = (byte) 255;
}
yOffset += width;
}
return rgbaData;
}
public static void convertYUV420SemiPlanarToNV12(byte[] yuv420spData, byte[] nv12Data, int width, int height) {
int frameSize = width * height;
int chromaOffset = frameSize;
// Copy the Y plane into the NV12 buffer
System.arraycopy(yuv420spData, 0, nv12Data, 0, frameSize);
// Copy the interleaved U/V pairs into the NV12 buffer
for (int i = 0; i < frameSize / 2; i += 2) {
nv12Data[chromaOffset + i] = yuv420spData[frameSize + i]; // U component
nv12Data[chromaOffset + i + 1] = yuv420spData[frameSize + i + 1]; // V component
}
}
public static Bitmap i420To(byte[] i420, int width, int height){
// Note: YuvImage expects semi-planar NV21; passing planar I420 here will skew the colors
YuvImage yuvImage = new YuvImage(i420, ImageFormat.NV21, width, height, null);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, outputStream);
byte[] jpegData = outputStream.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length);
return bitmap;
}
public static Bitmap i420To2(byte[] yuv420spData){
int width = 1920; // input video width
int height = 1080; // input video height
int uvPixelStride = 2; // with interleaved U/V storage the pixel stride is 2
int[] argbData = new int[width * height];
int yIndex = 0;
int uvIndex = width * height;
for (int y = 0; y < height; y++) {
int pY = yIndex;
int pUV = uvIndex;
for (int x = 0; x < width; x++) {
int Y = yuv420spData[pY++] & 0xFF;
int U = (yuv420spData[pUV] & 0xFF) - 128;
int V = (yuv420spData[pUV + 1] & 0xFF) - 128;
int R = Math.max(0, Math.min(255, Y + (int) (1.402f * V)));
int G = Math.max(0, Math.min(255, Y - (int) (0.344f * U + 0.714f * V)));
int B = Math.max(0, Math.min(255, Y + (int) (1.772f * U)));
int color = 0xFF000000 | (R << 16) | (G << 8) | B;
argbData[y * width + x] = color;
if (x % 2 == 1) {
pUV += uvPixelStride;
}
}
if (y % 2 == 1) {
uvIndex += width; // one interleaved UV row (width bytes) covers two luma rows
}
}
return Bitmap.createBitmap(argbData, width, height, Bitmap.Config.ARGB_8888);
}
public static Bitmap i420To3(byte[] yuv420spData){
int width = 1024;
int height = 576;
int[] argb = new int[width * height];
int pY = 0;
int pUV = width * height;
for (int i = 0, startY = 0; i < height; i++, startY += width) {
int uvIndex = (i / 2) * width;
for (int j = 0; j < width; j++) {
int uvOffset = (j / 2) << 1;
int u = (yuv420spData[pUV + uvIndex + uvOffset] & 0xFF) - 128;
int v = (yuv420spData[pUV + uvIndex + uvOffset + 1] & 0xFF) - 128;
int y = yuv420spData[pY + j] & 0xFF;
int r = Math.round(y + 1.402f * v);
int g = Math.round(y - 0.3441f * u - 0.7141f * v);
int b = Math.round(y + 1.772f * u);
r = Math.max(0, Math.min(255, r));
g = Math.max(0, Math.min(255, g));
b = Math.max(0, Math.min(255, b));
argb[startY + j] = 0xFF000000 | (r << 16) | (g << 8) | b;
}
}
return Bitmap.createBitmap(argb, width, height, Bitmap.Config.ARGB_8888);
}
}
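All of the converters above lean on the same YUV420 index arithmetic, so a small worked sketch may help; the function name is mine, and note that several of the "i420" helpers above actually index their input as semi-planar NV12:

// For a width x height frame, the Y plane is width*height bytes.
// I420 (planar):      Y plane | U plane (wh/4 bytes) | V plane (wh/4 bytes)
// NV12 (semi-planar): Y plane | interleaved UVUV... rows
// One interleaved chroma row (width bytes) serves two luma rows, and each
// UV pair serves two horizontal pixels.
fun nv12Indices(x: Int, y: Int, width: Int, height: Int): Triple<Int, Int, Int> {
    val frameSize = width * height
    val yIndex = y * width + x
    val uIndex = frameSize + (y / 2) * width + (x and 1.inv()) // x & ~1
    return Triple(yIndex, uIndex, uIndex + 1)                  // V follows U
}

Plugging these indices into the BT.601-style formulas used above (r = y + 1.402 v, g = y - 0.344 u - 0.714 v, b = y + 1.772 u, after subtracting 128 from u and v) reproduces nv12ToBitmap2 and nv12ToRGBA.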

@@ -3,7 +3,7 @@
android:layout_width="match_parent"
android:layout_height="match_parent">
<SurfaceView
<TextureView
android:id="@+id/surface"
android:layout_width="@dimen/_1920dp"
android:layout_height="@dimen/_1080dp"

@@ -16,11 +16,11 @@ object BitmapUtils {
return backBitmap
}
//create the new blank bitmap with the same width and height as the source
val newbmp =
Bitmap.createBitmap(backBitmap.width, backBitmap.height, Bitmap.Config.ARGB_8888)
val cv = Canvas(newbmp)
// val newbmp =
// Bitmap.createBitmap(backBitmap.width, backBitmap.height, Bitmap.Config.ARGB_8888)
val cv = Canvas(backBitmap)
//draw bg into
cv.drawBitmap(backBitmap, 0f, 0f, null) // draw the background starting at (0, 0)
// cv.drawBitmap(backBitmap, 0f, 0f, null) // draw the background starting at (0, 0)
//draw fg into
cv.drawBitmap(frontBitmap, 100f, 100f, null) // draw the foreground at (100, 100); it can be drawn at any position
//save all clip
@@ -28,7 +28,7 @@ object BitmapUtils {
//store
cv.restore() // restore the canvas state
// reclaim memory
backBitmap.recycle()
return newbmp
// backBitmap.recycle()
return backBitmap
}
}

@@ -32,7 +32,6 @@ public class AndroidMuxer {
private long mStartRecordTime = 0L;
private String mCurrentPath;
private long mRecordTime = -1;
private static final long DEFAULT_RECORD_DURATION = 30 * 60 * 1000;
public AndroidMuxer() {
@@ -44,7 +43,6 @@ public class AndroidMuxer {
long timeMillis = System.currentTimeMillis();
File output = CameraHelper.getOutputMediaFile(CameraHelper.MEDIA_TYPE_VIDEO, timeMillis, DEFAULT_RECORD_DURATION);
mCurrentPath = output.getAbsolutePath();
mRecordTime = timeMillis;
LogUtils.v(String.format("startRecording: %s", output));
mMuxer = new MediaMuxer(mCurrentPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mStarted = false;
@@ -81,6 +79,9 @@ public class AndroidMuxer {
}
public void writeSampleData(int trackIndex, ByteBuffer encodedData, MediaCodec.BufferInfo bufferInfo) {
if (mNumReleases != 0) {
return;
}
synchronized (mMuxer) {
mMuxer.writeSampleData(trackIndex, encodedData, bufferInfo);
}
@@ -112,8 +113,8 @@ public class AndroidMuxer {
public void stopMuxer() {
mMuxer.stop();
mMuxer.release();
if (mRecordTime > 0 && mRecordTime < System.currentTimeMillis() && !TextUtils.isEmpty(mCurrentPath)) {
insertToDB(mRecordTime, mCurrentPath);
if (mStartRecordTime > 0 && mStartRecordTime < System.currentTimeMillis() && !TextUtils.isEmpty(mCurrentPath)) {
insertToDB(mStartRecordTime, mCurrentPath);
}
}
}

@@ -0,0 +1,336 @@
package com.yinuo.library.vlc.encoder;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import androidx.annotation.NonNull;
import com.common.commonlib.utils.LogUtils;
import org.easydarwin.PushHelper;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.concurrent.ArrayBlockingQueue;
@TargetApi(Build.VERSION_CODES.M)
public class MediaCodecManager {
public static boolean DEBUG = false;
private static final String TAG = "MediaCodecManager";
// parameters for the encoder
private static final String MIME_TYPE = "video/hevc"; // H.265/HEVC
//
private ArrayBlockingQueue<byte[]> frameBytes;
private MediaCodec mMediaCodec;
private int mColorFormat;
private long mStartTime = 0;
private MediaFormat mediaFormat;
private volatile boolean isStart = false;
private volatile boolean isPause = false;
private int dstWidth, dstHeight;
private static MediaCodecManager sInstance;
private HandlerThread mHandlerThread;
private boolean isInitCodec;
private boolean isFlush = false;
private long lastPauseTime = -1; // time of the last pause
private boolean isHasKeyFrame;
private Handler mHandler;
private int off_y = 50, off_x = 100;
private int rotation;
private AndroidMuxer androidMuxer;
protected int mTrackIndex = -1;
private MediaCodecManager() {
mHandlerThread = new HandlerThread("codecThread");
mHandlerThread.start();
mHandler = new Handler(mHandlerThread.getLooper());
}
public static MediaCodecManager getInstance() {
if (sInstance == null) {
synchronized (MediaCodecManager.class) {
if (sInstance == null)
sInstance = new MediaCodecManager();
}
}
return sInstance;
}
/**
* Initialize the encoder with the destination frame size and rotation.
*
* @param dstWidth destination frame width
* @param dstHeight destination frame height
* @param rotation rotation in degrees (90/270 swap width and height)
*/
public void initCodecManager(int dstWidth, int dstHeight, int rotation) {
isFlush = false;
if (!isInitCodec) {
mHandler.post(() -> {
frameBytes = new ArrayBlockingQueue<>(100);
MediaCodecManager.this.dstWidth = dstWidth;
MediaCodecManager.this.dstHeight = dstHeight;
MediaCodecManager.this.rotation = rotation;
prepare();
isInitCodec = true;
});
}
}
/**
* Start encoding; creates the muxer on first start.
*/
public void startMediaCodec() {
if (androidMuxer == null) {
androidMuxer = new AndroidMuxer();
}
if (isStart) {
LogUtils.w("startMediaCodec: was started");
return;
}
mHandler.post(() -> {
start();
});
}
long pauseTime;
/**
* Pause encoding: release the current muxer and drop queued frames.
*/
public void pauseMediaCodec() {
if (!isStart || isPause) {
LogUtils.w(TAG, "MediaCodec: not started or already paused");
return;
}
if (androidMuxer != null) {
androidMuxer.release();
androidMuxer = null;
}
isPause = true;
pauseTime = System.nanoTime();
frameBytes.clear();
}
private long mTime;
/**
* Resume encoding; the paused interval is folded into the timeline offset.
*/
public void resumeMediaCodec() {
if (!isStart || !isPause) {
LogUtils.w(TAG, "MediaCodec: not started or not paused");
return;
}
isPause = false;
mTime += System.nanoTime() - pauseTime;
}
public void releaseManager() {
mHandler.post(() -> {
if (mMediaCodec != null) {
frameBytes.clear();
mHandlerThread.quit();
// YuvOsdUtils.releaseOsd();
stopMediaCodec();
sInstance = null;
if (androidMuxer != null) {
androidMuxer.release();
}
}
});
}
public void addFrameData(byte[] data) {
if (isStart && !isPause) {
boolean isOffer = frameBytes.offer(data);
// Logger1.i(TAG, "addFrameData: isOffer=%s", isOffer);
if (!isOffer) {
frameBytes.poll();
frameBytes.offer(data);
}
}
}
/**
* Expected input pixel layouts:
* <p>
* YV12: YYYYYYYY VV UU => YUV420P
* NV12: YYYYYYYY UVUV => YUV420SP
* NV21: YYYYYYYY VUVU => YUV420SP
* create at 2017/3/22 18:13
*/
private void prepare() {
mColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar; // NV12: the final frame data must be NV12
int videoW = rotation == 90 || rotation == 270 ? dstHeight : dstWidth;
int videoH = rotation == 90 || rotation == 270 ? dstWidth : dstHeight;
mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, // Note: after rotating, width and height must be swapped here, otherwise the recorded video comes out garbled.
videoW, videoH);
int frameRate = 25; // 25 fps
int compressRatio = 256;
int bitRate = dstWidth * dstHeight * 3 * 8 * frameRate / compressRatio;
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mColorFormat);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
LogUtils.w("prepare format: " + mediaFormat);
}
public static final int NAL_I = 19;
public static final int NAL_VPS = 32;
private byte[] vps_sps_pps_buf;
byte[] outData;
private void start() {
if (!isInitCodec)
throw new RuntimeException("initCodec is false,please call initCodecManager() before");
if (isStart) {
LogUtils.w(TAG, "startMediaCodec: was started");
return;
}
try {
LogUtils.w(TAG, "startMediaCodec: starting");
mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
mMediaCodec.configure(mediaFormat, null, null,
MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.setCallback(new MediaCodec.Callback() {
@Override
public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
byte[] data = null;
try {
data = frameBytes.take();
} catch (InterruptedException e) {
e.printStackTrace();
return;
}
ByteBuffer inputBuffer = codec.getInputBuffer(index);
if (inputBuffer == null)
return;
inputBuffer.clear();
inputBuffer.put(data);
long currentTimeUs = (System.nanoTime() - mTime) / 1000; // shift the timeline by the accumulated pause time to achieve pause/resume recording
codec.queueInputBuffer(index, 0, data.length, currentTimeUs, 0);
}
@Override
public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
ByteBuffer outputBuffer = codec.getOutputBuffer(index);
if (!isHasKeyFrame && (info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
isHasKeyFrame = true;
}
if (outData == null || outData.length != info.size) {
outData = new byte[info.size];
}
if (outputBuffer == null)
return;
else if (info.presentationTimeUs < lastPauseTime || !isHasKeyFrame) { // data from before the pause, or no key frame yet: drop it
// the first frame of a video must be a key frame
outputBuffer.get(outData);
} else if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
LogUtils.w(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
} else {
outputBuffer.position(info.offset);
outputBuffer.limit(info.offset + info.size);
if (androidMuxer != null) {
androidMuxer.writeSampleData(mTrackIndex, outputBuffer, info);
}
// Re-read the encoded bytes so the NAL-type parsing below sees this buffer
outputBuffer.position(info.offset);
outputBuffer.get(outData, 0, info.size);
int offset = 4;
if (outData[2] == 0x01) {
offset = 3;
}
int type = (outData[offset] & 0x7E) >> 1;
if (type == NAL_VPS) {
vps_sps_pps_buf = outData.clone(); // keep a copy: outData is reused for every buffer
PushHelper.INSTANCE.pushData(outData, outData.length, info.presentationTimeUs / 1000);
} else if (type == NAL_I) {
if (vps_sps_pps_buf != null) {
byte[] newBuf = new byte[vps_sps_pps_buf.length + outData.length];
System.arraycopy(vps_sps_pps_buf, 0, newBuf, 0, vps_sps_pps_buf.length);
System.arraycopy(outData, 0, newBuf, vps_sps_pps_buf.length, outData.length);
outData = newBuf;
PushHelper.INSTANCE.pushData(outData, outData.length, info.presentationTimeUs / 1000);
}
}
}
codec.releaseOutputBuffer(index, false);
}
@Override
public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException e) {
}
@Override
public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {
MediaFormat newFormat = mMediaCodec.getOutputFormat();
mTrackIndex = androidMuxer.addTrack(newFormat);
}
}, mHandler);
mMediaCodec.start();
} catch (Exception e) {
e.printStackTrace();
return;
}
isStart = true;
}
/**
*
*/
/**
* Flush queued frames and mark the cut point; output from before it is dropped.
*/
public synchronized void flushMediaCodec() {
LogUtils.w(TAG, "flushMediaCodec");
frameBytes.clear();
isFlush = true;
lastPauseTime = (System.nanoTime() - mTime) / 1000; // record the cut point on the same shifted timeline as presentationTimeUs
isHasKeyFrame = false;
}
private void stopMediaCodec() {
if (isStart && mMediaCodec != null) {
mMediaCodec.stop();
mMediaCodec.release();
mMediaCodec = null;
}
isStart = false;
isPause = true;
LogUtils.w(TAG, "stopMediaCodec video");
}
private int getIndex(char c) {
if (c >= '0' && c <= '9')
return c - '0';
else if (c == '-')
return 10;
else if (c == ' ')
return 11;
else if (c == ':')
return 12;
return 11;
}
}
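The pause/resume behaviour above hinges on one trick: accumulate the paused wall-clock time in mTime and subtract it when stamping each input buffer, so the encoder sees one continuous timeline with no gap. A minimal standalone sketch of that bookkeeping (class and method names are mine):

// Pause-aware presentation clock, mirroring pauseMediaCodec()/resumeMediaCodec().
class PausableClock {
    private var pausedTotalNs = 0L  // total paused time (mTime in the code above)
    private var pauseStartNs = -1L  // start of the current pause, -1 if running

    fun pause() {
        if (pauseStartNs < 0) pauseStartNs = System.nanoTime()
    }

    fun resume() {
        if (pauseStartNs >= 0) {
            pausedTotalNs += System.nanoTime() - pauseStartNs
            pauseStartNs = -1L
        }
    }

    // Microsecond timestamp with paused intervals removed; frames queued with
    // these values splice seamlessly across a pause.
    fun nowUs(): Long = (System.nanoTime() - pausedTotalNs) / 1000
}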

@@ -32,12 +32,14 @@ object TxtOverlay {
// bitmap rendered from the overlay text
private var bmp: Bitmap? = null
private var yuv: ByteArray? = null
// timestamp format string
private val dateFormat = SimpleDateFormat("yy-MM-dd HH:mm:ss")
fun buildOverlayBitmap(): Bitmap? {
fun buildOverlayBitmap() {
if (TextUtils.isEmpty(mToDoShowTip)) {
return null
return
}
val currentTimeMillis = System.currentTimeMillis()
// Throttle how often the bitmap is regenerated, to protect performance
@@ -53,14 +55,18 @@
)
// scale / rotate the bitmap
// bmp = YUVUtils.rotateImage(bmp, 0);
yuv = YUVUtils.getYUVByBitmap(bmp)
}
return bmp
}
fun getOverlayBitmap(): Bitmap? {
return bmp
}
fun getOverlayYuv(): ByteArray? {
return yuv
}
fun setTipChangeListener(onChange: () -> Unit) {
mTipChangeListener = onChange
}
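Putting the commit together, the intended flow appears to be: the player callback delivers an NV12 frame, TxtOverlay keeps a throttled, pre-converted Y plane of the rendered text, and RecordHelper stamps it into the frame before handing it to MediaCodecManager. A hedged wiring sketch (method names are from the diff; the callback shape and the rebuild trigger are simplified assumptions):

// Per-frame path, as wired up in HomeActivity/RecordHelper above.
fun onYuvFrame(nv12Data: ByteArray) {
    TxtOverlay.buildOverlayBitmap()             // throttled: refreshes bmp and its cached YUV
    val overlayYuv = TxtOverlay.getOverlayYuv() // cached Y plane of the rendered text
    if (overlayYuv != null) {
        // RecordHelper.onFrameAvailable() copies overlayYuv into nv12Data here,
        // then queues the watermarked frame for encoding:
        // codecManager.addFrameData(nv12Data)
    }
}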
