desc:OOM问题

main
xiaowusky 1 year ago
parent 4f42e76383
commit bb43fdbecb

@ -1,17 +1,6 @@
package com.yinuo.safetywatcher.watcher.utils;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import java.io.ByteArrayOutputStream;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.IntBuffer;
public class SnUtils {
public static String getDeviceSN() {
@ -25,394 +14,4 @@ public class SnUtils {
}
return serial;
}
/**
 * Converts an I420 (planar YUV420: Y plane, then U plane, then V plane) frame
 * into an array of packed ARGB pixels (alpha fixed at 0xFF).
 *
 * @param i420   source frame, at least width*height*3/2 bytes
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return one ARGB int per pixel, row-major
 */
public static int[] yuvI420toARGB(byte[] i420, int width, int height) {
    final int frameSize = width * height;
    final int uPlane = frameSize;
    final int vPlane = frameSize + frameSize / 4;
    int[] argb = new int[frameSize];
    for (int px = 0; px < frameSize; px++) {
        // Chroma is subsampled 2x2: one U/V sample covers a 2x2 luma block.
        int chromaIdx = (px / width) / 2 * (width / 2) + (px % width) / 2;
        int y = i420[px] & 0xff;
        int u = i420[uPlane + chromaIdx] & 0xff;
        int v = i420[vPlane + chromaIdx] & 0xff;
        // Floating-point YUV -> RGB conversion (BT.709-style coefficients).
        int b = (int) (y + 1.8556 * (u - 128));
        int g = (int) (y - (0.4681 * (v - 128) + 0.1872 * (u - 128)));
        int r = (int) (y + 1.5748 * (v - 128));
        // Clamp each channel to [0, 255].
        b = Math.min(255, Math.max(0, b));
        g = Math.min(255, Math.max(0, g));
        r = Math.min(255, Math.max(0, r));
        argb[px] = 0xff000000 | (0x00ff0000 & r << 16) | (0x0000ff00 & g << 8) | (0x000000ff & b);
    }
    return argb;
}
/**
 * Serializes an int array to bytes in little-endian order via NIO buffers.
 *
 * @param array values to serialize
 * @return 4 bytes per int, least significant byte first
 */
static byte[] intArrToByteArrLittleEndianBySystemApi(int[] array) {
    ByteBuffer buffer = ByteBuffer.allocate(array.length * 4)
            .order(ByteOrder.LITTLE_ENDIAN);
    // The IntBuffer view writes through to the backing byte array.
    buffer.asIntBuffer().put(array);
    return buffer.array();
}
/**
 * Serializes an int array to bytes in big-endian (network) order via NIO buffers.
 *
 * @param array values to serialize
 * @return 4 bytes per int, most significant byte first
 */
static byte[] intArrToByteArrBigEndianBySystemApi(int[] array) {
    ByteBuffer buffer = ByteBuffer.allocate(array.length * 4)
            .order(ByteOrder.BIG_ENDIAN);
    // The IntBuffer view writes through to the backing byte array.
    buffer.asIntBuffer().put(array);
    return buffer.array();
}
// Shared output buffer, pre-sized for 1080p so the hot path does not allocate
// a full frame per call (the OOM fix this commit targets). Grown on demand.
// NOTE(review): not thread-safe — concurrent callers overwrite each other.
static byte[] yuv = new byte[1920 * 1080 * 3 / 2];

/**
 * Converts packed ARGB pixels into a YUV420 semi-planar buffer (Y plane
 * followed by interleaved U/V), written into the shared static {@code yuv}
 * buffer, which is also returned.
 *
 * The returned array may be larger than width*height*3/2; only the first
 * width*height*3/2 bytes are meaningful. The buffer is reused across calls.
 *
 * @param pixels ARGB pixels, row-major, width*height entries
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @return the shared YUV420SP buffer (valid until the next call)
 */
public static byte[] rgb2YCbCr420(int[] pixels, int width, int height) {
    int len = width * height;
    int needed = len * 3 / 2;
    // The original code assumed frames never exceed 1080p and threw
    // ArrayIndexOutOfBoundsException for anything larger; grow instead.
    if (yuv.length < needed) {
        yuv = new byte[needed];
    }
    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            // Drop the alpha channel.
            int rgb = pixels[i * width + j] & 0x00FFFFFF;
            // Channels are read low-byte-first (R in bits 0-7), preserving the
            // original implementation's BGR-style int layout.
            int r = rgb & 0xFF;
            int g = (rgb >> 8) & 0xFF;
            int b = (rgb >> 16) & 0xFF;
            // Integer fixed-point RGB -> YCbCr (coefficients scaled by 256).
            int y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
            int u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
            int v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;
            // Clamp to legal ranges (video-range luma floor at 16).
            y = y < 16 ? 16 : (y > 255 ? 255 : y);
            u = u < 0 ? 0 : (u > 255 ? 255 : u);
            v = v < 0 ? 0 : (v > 255 ? 255 : v);
            yuv[i * width + j] = (byte) y;
            // Each 2x2 pixel block shares one interleaved U/V pair; later
            // pixels in the block overwrite the pair (last sample wins),
            // matching the original subsampling behavior.
            yuv[len + (i >> 1) * width + (j & ~1)] = (byte) u;
            yuv[len + (i >> 1) * width + (j & ~1) + 1] = (byte) v;
        }
    }
    return yuv;
}
/**
 * Expands a semi-planar YUV420 buffer (Y plane followed by interleaved
 * chroma pairs) into a 4-byte-per-pixel output buffer with alpha fixed at 255.
 *
 * @param input  source frame, at least width*height*3/2 bytes
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @param output destination, at least width*height*4 bytes
 * @param isRGB  channel-order flag; note that {@code true} writes B,G,R,A and
 *               {@code false} writes R,G,B,A — preserved exactly from the
 *               original implementation
 */
public static void nv21ToRgba(byte[] input, int width, int height, byte[] output, boolean isRGB) {
    final int chromaBase = width * height;
    int lumaIdx = 0;
    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++, lumaIdx++) {
            // One interleaved chroma pair serves a 2x2 block of luma samples.
            int pairIdx = (row / 2) * width + (col - col % 2);
            int y = input[lumaIdx] & 0xff;
            int u = input[chromaBase + pairIdx] & 0xff;
            int v = input[chromaBase + pairIdx + 1] & 0xff;
            // Integer fixed-point YUV -> RGB (coefficients scaled by 256).
            int r = y + ((351 * (v - 128)) >> 8);
            int g = y - ((179 * (v - 128) + 86 * (u - 128)) >> 8);
            int b = y + ((443 * (u - 128)) >> 8);
            // Clamp to the byte range.
            r = Math.max(0, Math.min(255, r));
            g = Math.max(0, Math.min(255, g));
            b = Math.max(0, Math.min(255, b));
            int base = lumaIdx * 4;
            if (isRGB) {
                output[base] = (byte) b;
                output[base + 1] = (byte) g;
                output[base + 2] = (byte) r;
            } else {
                output[base] = (byte) r;
                output[base + 1] = (byte) g;
                output[base + 2] = (byte) b;
            }
            output[base + 3] = (byte) 255; // fully opaque
        }
    }
}
/**
 * Decodes an NV12 frame into an ARGB_8888 Bitmap using a floating-point
 * YUV -> RGB conversion, allocating a fresh pixel array per call.
 *
 * @param nv12Data source frame, at least width*height*3/2 bytes
 * @param width    frame width in pixels
 * @param height   frame height in pixels
 */
public static Bitmap nv12ToBitmap2(byte[] nv12Data, int width, int height) {
    final int frameSize = width * height;
    int[] pixels = new int[frameSize];
    int rowStart = 0;
    for (int row = 0; row < height; row++) {
        int outIdx = row * width;
        int chromaRow = frameSize + (row >> 1) * width;
        for (int col = 0; col < width; col++) {
            int y = nv12Data[rowStart + col] & 0xFF;
            // Interleaved chroma: each U/V pair covers two horizontal pixels.
            int u = (nv12Data[chromaRow + (col & ~1)] & 0xFF) - 128;
            int v = (nv12Data[chromaRow + (col & ~1) + 1] & 0xFF) - 128;
            int r = (int) (y + 1.402 * v);
            int g = (int) (y - 0.344136 * u - 0.714136 * v);
            int b = (int) (y + 1.772 * u);
            r = Math.max(0, Math.min(r, 255));
            g = Math.max(0, Math.min(g, 255));
            b = Math.max(0, Math.min(b, 255));
            pixels[outIdx++] = Color.argb(255, r, g, b);
        }
        rowStart += width;
    }
    return Bitmap.createBitmap(pixels, width, height, Bitmap.Config.ARGB_8888);
}
/** Decodes an NV12 frame (U before V in each chroma pair) into a Bitmap. */
public static Bitmap nv12ToBitmap(byte[] data, int w, int h) {
    // NV12 layout: U at even chroma offsets, V at odd ones.
    return spToBitmap(data, w, h, /* uOff= */ 0, /* vOff= */ 1);
}
/** Decodes an NV21 frame (V before U in each chroma pair) into a Bitmap. */
public static Bitmap nv21ToBitmap(byte[] data, int w, int h) {
    // NV21 layout: V at even chroma offsets, U at odd ones.
    return spToBitmap(data, w, h, /* uOff= */ 1, /* vOff= */ 0);
}
// Cached conversion buffer reused across frames to avoid a full-frame int[]
// allocation per call (the OOM concern this file addresses).
// NOTE(review): not thread-safe — concurrent callers share this buffer.
static int[] colors = null;

/**
 * Decodes a semi-planar (NV12/NV21) YUV frame into an RGB_565 Bitmap using
 * integer fixed-point conversion.
 *
 * @param data source frame, at least w*h*3/2 bytes
 * @param w    frame width in pixels
 * @param h    frame height in pixels
 * @param uOff offset of U within each interleaved chroma pair (0 = NV12, 1 = NV21)
 * @param vOff offset of V within each interleaved chroma pair (1 = NV12, 0 = NV21)
 */
private static Bitmap spToBitmap(byte[] data, int w, int h, int uOff, int vOff) {
    int plane = w * h;
    // The original cached buffer was never resized: a larger frame after a
    // smaller one crashed with ArrayIndexOutOfBoundsException, and a smaller
    // frame handed a wrong-sized array to createBitmap. Re-allocate whenever
    // the frame size changes.
    if (colors == null || colors.length != plane) {
        colors = new int[plane];
    }
    int yPos = 0, uvPos = plane;
    for (int j = 0; j < h; j++) {
        for (int i = 0; i < w; i++) {
            // YUV byte to RGB int (fixed-point, results scaled by 1024).
            final int y1 = data[yPos] & 0xff;
            final int u = (data[uvPos + uOff] & 0xff) - 128;
            final int v = (data[uvPos + vOff] & 0xff) - 128;
            final int y1192 = 1192 * y1;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);
            // Clamp to the scaled range before packing.
            r = (r < 0) ? 0 : ((r > 262143) ? 262143 : r);
            g = (g < 0) ? 0 : ((g > 262143) ? 262143 : g);
            b = (b < 0) ? 0 : ((b > 262143) ? 262143 : b);
            colors[yPos] = ((r << 6) & 0xff0000) |
                    ((g >> 2) & 0xff00) |
                    ((b >> 10) & 0xff);
            // Advance the chroma cursor once per two luma pixels.
            if ((yPos++ & 1) == 1) uvPos += 2;
        }
        // Each chroma row serves two luma rows; rewind after even rows.
        if ((j & 1) == 0) uvPos -= w;
    }
    return Bitmap.createBitmap(colors, w, h, Bitmap.Config.RGB_565);
}
/** Decodes a planar I420 frame (U plane before V plane) into a Bitmap. */
public static Bitmap i420ToBitmap(byte[] data, int w, int h) {
    return pToBitmap(data, w, h, /* uv= */ true);
}
/** Decodes a planar YV12 frame (V plane before U plane) into a Bitmap. */
public static Bitmap yv12ToBitmap(byte[] data, int w, int h) {
    return pToBitmap(data, w, h, /* uv= */ false);
}
/**
 * Decodes a planar YUV420 frame into an RGB_565 Bitmap using integer
 * fixed-point conversion.
 *
 * @param data source frame, at least w*h*3/2 bytes
 * @param w    frame width in pixels
 * @param h    frame height in pixels
 * @param uv   true for I420 (U plane first), false for YV12 (V plane first)
 */
private static Bitmap pToBitmap(byte[] data, int w, int h, boolean uv) {
    int plane = w * h;
    int[] argb = new int[plane];
    int quarter = plane >> 2; // each chroma plane is a quarter of the luma plane
    int yPos = 0;
    int uPos = plane + (uv ? 0 : quarter);
    int vPos = plane + (uv ? quarter : 0);
    for (int row = 0; row < h; row++) {
        for (int col = 0; col < w; col++) {
            // YUV byte to RGB int (fixed-point, results scaled by 1024).
            final int y1 = data[yPos] & 0xff;
            final int u = (data[uPos] & 0xff) - 128;
            final int v = (data[vPos] & 0xff) - 128;
            final int y1192 = 1192 * y1;
            int r = Math.max(0, Math.min(262143, y1192 + 1634 * v));
            int g = Math.max(0, Math.min(262143, y1192 - 833 * v - 400 * u));
            int b = Math.max(0, Math.min(262143, y1192 + 2066 * u));
            argb[yPos] = ((r << 6) & 0xff0000)
                    | ((g >> 2) & 0xff00)
                    | ((b >> 10) & 0xff);
            // Chroma cursors advance once per two luma pixels.
            if ((yPos++ & 1) == 1) {
                uPos++;
                vPos++;
            }
        }
        // Each chroma row serves two luma rows; rewind after even rows.
        if ((row & 1) == 0) {
            uPos -= (w >> 1);
            vPos -= (w >> 1);
        }
    }
    return Bitmap.createBitmap(argb, w, h, Bitmap.Config.RGB_565);
}
// public static byte[] nv12ToRgba(byte[] nv12Data, int width, int height) {
// int nv12Size = width * height * 12;
// byte[] rgbaData = new byte[nv12Size + 4 * (width * height)];
//
// int offset = 0;
// for (int y = 0; y < height; y++) {
// for (int x = 0; x < width; x++) {
// int nv12Offset = (y * width + x) * 12;
// int yValue = (nv12Data[nv12Offset] & 0xFF) << 8 | (nv12Data[nv12Offset + 1] & 0xFF);
// int uValue = (nv12Data[nv12Offset + 2] & 0xFF) << 8 | (nv12Data[nv12Offset + 3] & 0xFF);
// int vValue = (nv12Data[nv12Offset + 4] & 0xFF) << 8 | (nv12Data[nv12Offset + 5] & 0xFF);
//
// int r = (yValue + 16) >> 16;
// int g = ((66 * uValue + 128) >> 16) + r;
// int b = ((49 * vValue + 128) >> 16) + g;
// int a = 255;
//
// rgbaData[offset++] = (byte) r;
// rgbaData[offset++] = (byte) g;
// rgbaData[offset++] = (byte) b;
// rgbaData[offset++] = (byte) a;
// }
// }
//
// return rgbaData;
// }
/**
 * Expands an NV12 frame into a tightly packed RGBA byte buffer
 * (4 bytes per pixel, alpha fixed at 255).
 *
 * @param nv12Data source frame, at least width*height*3/2 bytes
 * @param width    frame width in pixels
 * @param height   frame height in pixels
 * @return newly allocated buffer of width*height*4 bytes in R,G,B,A order
 */
public static byte[] nv12ToRGBA(byte[] nv12Data, int width, int height) {
    byte[] rgbaData = new byte[width * height * 4];
    int frameSize = width * height;
    int rgbaIndex = 0;
    int yOffset = 0;
    // (Removed an unused nv12Index local from the original implementation.)
    for (int j = 0; j < height; j++) {
        // All pixels in this row share one interleaved chroma row; hoist it.
        int uvRow = frameSize + (j >> 1) * width;
        for (int i = 0; i < width; i++) {
            int y = nv12Data[yOffset + i] & 0xFF;
            // Each U/V pair covers two horizontal pixels.
            int uvIndex = uvRow + (i & ~1);
            int u = (nv12Data[uvIndex] & 0xFF) - 128;
            int v = (nv12Data[uvIndex + 1] & 0xFF) - 128;
            int r = (int) (y + 1.402 * v);
            int g = (int) (y - 0.344136 * u - 0.714136 * v);
            int b = (int) (y + 1.772 * u);
            // Clamp to the byte range before packing.
            rgbaData[rgbaIndex++] = (byte) Math.max(0, Math.min(r, 255));
            rgbaData[rgbaIndex++] = (byte) Math.max(0, Math.min(g, 255));
            rgbaData[rgbaIndex++] = (byte) Math.max(0, Math.min(b, 255));
            rgbaData[rgbaIndex++] = (byte) 255; // fully opaque
        }
        yOffset += width;
    }
    return rgbaData;
}
/**
 * Copies a YUV420 semi-planar frame into an NV12 destination buffer.
 *
 * Both layouts are identical (a Y plane followed by an interleaved U/V
 * plane), so the conversion is a straight copy. The original loop advanced
 * the destination index by 1 while advancing the source by 2, so successive
 * iterations overwrote each other and the chroma plane came out scrambled;
 * this replaces it with a direct copy of the whole chroma plane.
 *
 * @param yuv420spData source frame, at least width*height*3/2 bytes
 * @param nv12Data     destination buffer, at least width*height*3/2 bytes
 * @param width        frame width in pixels
 * @param height       frame height in pixels
 */
public static void convertYUV420SemiPlanarToNV12(byte[] yuv420spData, byte[] nv12Data, int width, int height) {
    int frameSize = width * height;
    // Luma plane.
    System.arraycopy(yuv420spData, 0, nv12Data, 0, frameSize);
    // Interleaved chroma plane (frameSize / 2 bytes).
    System.arraycopy(yuv420spData, frameSize, nv12Data, frameSize, frameSize / 2);
}
/**
 * Converts a YUV frame to a Bitmap by round-tripping through an in-memory
 * JPEG at quality 100.
 *
 * NOTE(review): the parameter is named i420 but the buffer is handed to
 * YuvImage as ImageFormat.NV21 — confirm callers actually supply NV21 data;
 * planar I420 fed through this path would decode with wrong colors.
 */
public static Bitmap i420To(byte[] i420, int width, int height) {
    YuvImage image = new YuvImage(i420, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
    image.compressToJpeg(new Rect(0, 0, width, height), 100, jpegStream);
    byte[] jpegBytes = jpegStream.toByteArray();
    return BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
}
/**
 * Converts a 1920x1080 YUV420 semi-planar frame into an ARGB_8888 Bitmap.
 *
 * Dimensions are hard-coded to 1080p (presumably the main camera stream —
 * TODO confirm); the input must hold at least 1920*1080*3/2 bytes.
 *
 * Two fixes versus the original: the luma cursor ({@code yIndex}) was never
 * advanced, so every output row reused row 0's luma; and the chroma row
 * cursor advanced by 2*width per chroma row instead of width, skipping
 * alternate chroma rows and eventually indexing past the end of the buffer.
 *
 * @param yuv420spData source frame, at least 1920*1080*3/2 bytes
 */
public static Bitmap i420To2(byte[] yuv420spData) {
    int width = 1920;
    int height = 1080;
    int uvPixelStride = 2; // interleaved U/V: one 2-byte pair per two pixels
    int[] argbData = new int[width * height];
    int yIndex = 0;
    int uvIndex = width * height; // start of the interleaved chroma plane
    for (int y = 0; y < height; y++) {
        int pY = yIndex;
        int pUV = uvIndex;
        for (int x = 0; x < width; x++) {
            int Y = yuv420spData[pY++] & 0xFF;
            int U = (yuv420spData[pUV] & 0xFF) - 128;
            int V = (yuv420spData[pUV + 1] & 0xFF) - 128;
            int R = Math.max(0, Math.min(255, Y + (int) (1.402f * V)));
            int G = Math.max(0, Math.min(255, Y - (int) (0.344f * U + 0.714f * V)));
            int B = Math.max(0, Math.min(255, Y + (int) (1.772f * U)));
            argbData[y * width + x] = 0xFF000000 | (R << 16) | (G << 8) | B;
            if (x % 2 == 1) {
                pUV += uvPixelStride; // next chroma pair every two pixels
            }
        }
        // BUGFIX: advance the luma cursor one row per iteration.
        yIndex += width;
        if (y % 2 == 1) {
            // BUGFIX: the chroma plane's row stride is `width` bytes
            // (width/2 pairs * 2 bytes), not width * uvPixelStride.
            uvIndex += width;
        }
    }
    return Bitmap.createBitmap(argbData, width, height, Bitmap.Config.ARGB_8888);
}
/**
 * Converts a 1024x576 YUV420 semi-planar frame into an ARGB_8888 Bitmap.
 *
 * Dimensions are hard-coded (presumably a secondary/sub stream — TODO
 * confirm); the input must hold at least 1024*576*3/2 bytes.
 *
 * Fixes the original's luma indexing: it read {@code data[pY + j]} with
 * {@code pY} fixed at 0, so every output row reused row 0's luma samples.
 *
 * @param yuv420spData source frame, at least 1024*576*3/2 bytes
 */
public static Bitmap i420To3(byte[] yuv420spData) {
    int width = 1024;
    int height = 576;
    int[] argb = new int[width * height];
    int pUV = width * height; // start of the interleaved chroma plane
    for (int i = 0, rowStart = 0; i < height; i++, rowStart += width) {
        int uvRow = (i / 2) * width; // one chroma row serves two luma rows
        for (int j = 0; j < width; j++) {
            int uvOffset = (j / 2) << 1; // one 2-byte U/V pair per two pixels
            int u = (yuv420spData[pUV + uvRow + uvOffset] & 0xFF) - 128;
            int v = (yuv420spData[pUV + uvRow + uvOffset + 1] & 0xFF) - 128;
            // BUGFIX: index luma from the current row, not row 0.
            int y = yuv420spData[rowStart + j] & 0xFF;
            int r = Math.round(y + 1.402f * v);
            int g = Math.round(y - 0.3441f * u - 0.7141f * v);
            int b = Math.round(y + 1.772f * u);
            r = Math.max(0, Math.min(255, r));
            g = Math.max(0, Math.min(255, g));
            b = Math.max(0, Math.min(255, b));
            argb[rowStart + j] = 0xFF000000 | (r << 16) | (g << 8) | b;
        }
    }
    return Bitmap.createBitmap(argb, width, height, Bitmap.Config.ARGB_8888);
}
}

@ -0,0 +1,26 @@
package org.easydarwin.util;
/**
 * Process-wide holder for two pre-allocated 1080p YUV420 working buffers
 * (1920 * 1080 * 3 / 2 bytes each), so the decode loop can reuse them frame
 * after frame instead of allocating fresh full-frame arrays (OOM mitigation).
 */
public class ByteArrUtils {

    /** Size in bytes of one 1080p YUV420 frame. */
    private static final int FRAME_BUFFER_SIZE = 1920 * 1080 * 3 / 2;

    // Eagerly initialized singleton.
    private static final ByteArrUtils INSTANCE = new ByteArrUtils();

    private final byte[] in = new byte[FRAME_BUFFER_SIZE];
    private final byte[] nv12Data = new byte[FRAME_BUFFER_SIZE];

    private ByteArrUtils() {
        // Buffers are allocated once at class load via the field initializers.
    }

    public static ByteArrUtils getInstance() {
        return INSTANCE;
    }

    /** Shared scratch buffer for raw decoder output. */
    public byte[] getInArr() {
        return in;
    }

    /** Shared scratch buffer for the NV12 copy of a frame. */
    public byte[] getNv12DataArr() {
        return nv12Data;
    }
}

@ -30,6 +30,7 @@ import androidx.lifecycle.LifecycleOwner;
import androidx.lifecycle.OnLifecycleEvent;
import org.easydarwin.sw.JNIUtil;
import org.easydarwin.util.ByteArrUtils;
import org.easydarwin.util.CodecSpecificDataUtil;
import org.easydarwin.util.TextureLifecycler;
@ -807,8 +808,8 @@ public class EasyPlayerClient implements Client.SourceCallBack {
return null;
}
byte[] in = new byte[1920 * 1080 * 3 / 2];
byte[] nv12Data = new byte[1920 * 1080 * 3 / 2];
// byte[] in = new byte[1920 * 1080 * 3 / 2];
// byte[] nv12Data = new byte[1920 * 1080 * 3 / 2];
VideoCodec.VideoDecoderLite displayer = null;
ByteBuffer displayTmp = null;
@ -967,6 +968,8 @@ public class EasyPlayerClient implements Client.SourceCallBack {
|| mColorFormat == COLOR_TI_FormatYUV420PackedSemiPlanar) {
mIndex = mIndex + 1;
if (mIndex < 3) {
byte[] in = ByteArrUtils.getInstance().getInArr();
byte[] nv12Data = ByteArrUtils.getInstance().getNv12DataArr();
outputBuffer.clear();
outputBuffer.get(in);
System.arraycopy(in, 0, nv12Data, 0, in.length);

Loading…
Cancel
Save