desc:add library-rtsp

main
xiaowusky 2 years ago
parent 129ad1e9fa
commit 096556b85b

@ -69,8 +69,9 @@ dependencies {
implementation project(path: ':library-ijkplayer')
implementation project(path: ':library-serialPort')
implementation project(path: ':library-common')
implementation project(path: ':library-vlc')
implementation(name: 'libvlc-3.0.0', ext: 'aar')
implementation project(path: ':library-rtsp')
// implementation project(path: ':library-vlc')
// implementation(name: 'libvlc-3.0.0', ext: 'aar')
//excel
implementation rootProject.ext.dependencies.jxl
//

@ -10,7 +10,6 @@ import com.common.commonlib.db.DBUtils
import com.common.commonlib.db.entity.Gas
import com.common.commonlib.db.entity.Video
import com.common.commonlib.db.entity.Warning
import com.yinuo.library.vlc.PushHelper
import com.yinuo.safetywatcher.watcher.net.UploadFileApi
import com.yinuo.safetywatcher.watcher.ui.view.CommonDialog
import com.yinuo.safetywatcher.watcher.utils.DateUtils
@ -18,6 +17,7 @@ import com.yinuo.safetywatcher.watcher.xls.SimpleCellValue
import com.yinuo.safetywatcher.watcher.xls.utils.ExcelUtils
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.launch
import org.easydarwin.PushHelper
import java.io.BufferedReader
import java.io.DataOutputStream
import java.io.File

@ -6,9 +6,8 @@ import com.common.commonlib.db.DBUtils
import com.common.commonlib.db.entity.Gas
import com.common.commonlib.db.entity.GasType
import com.common.commonlib.db.entity.Warning
import com.common.commonlib.utils.LogUtils
import com.common.serialport.utils.HexUtils
import com.yinuo.library.vlc.TxtOverlay
import com.yinuo.library.vlc.utils.LogUtils
import com.yinuo.safetywatcher.watcher.constant.GAS_CLOUD_UPLOAD_SIZE_ONCE
import com.yinuo.safetywatcher.watcher.net.DevicesApi
import com.yinuo.safetywatcher.watcher.port.cmd.CH4
@ -21,6 +20,7 @@ import com.yinuo.safetywatcher.watcher.port.cmd.getGasTypeByCode
import kotlinx.coroutines.DelicateCoroutinesApi
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.launch
import org.easydarwin.TxtOverlay
import kotlin.math.pow
object ParseHelper {

@ -6,7 +6,7 @@ import android.view.View
import com.common.commonlib.db.DBUtils
import com.common.commonlib.db.entity.Gas
import com.common.commonlib.db.entity.Video
import com.yinuo.library.vlc.utils.LogUtils
import com.common.commonlib.utils.LogUtils
import com.yinuo.safetywatcher.R
import com.yinuo.safetywatcher.databinding.ActivityCloudBinding
import com.yinuo.safetywatcher.watcher.base.NoOptionsActivity

@ -7,11 +7,8 @@ import android.view.View
import androidx.annotation.RequiresApi
import androidx.lifecycle.lifecycleScope
import com.common.commonlib.db.DBUtils
import com.common.commonlib.utils.LogUtils
import com.common.commonlib.utils.NetworkHelper
import com.yinuo.library.vlc.IFrameCallBack
import com.yinuo.library.vlc.RtspSurfaceRender2
import com.yinuo.library.vlc.TxtOverlay
import com.yinuo.library.vlc.utils.LogUtils
import com.yinuo.safetywatcher.R
import com.yinuo.safetywatcher.databinding.ActivityHomeBinding
import com.yinuo.safetywatcher.watcher.AppData
@ -30,7 +27,10 @@ import com.yinuo.safetywatcher.watcher.utils.SimHelper
import com.yinuo.safetywatcher.watcher.utils.WifiHelper
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import org.easydarwin.TxtOverlay
import org.easydarwin.video.EasyPlayerClient
import java.io.File
class HomeActivity : NoOptionsActivity() {
@ -41,8 +41,6 @@ class HomeActivity : NoOptionsActivity() {
ActivityHomeBinding.inflate(layoutInflater)
}
private var mRender: RtspSurfaceRender2? = null
override val isHome: Boolean
get() = true
@ -77,7 +75,7 @@ class HomeActivity : NoOptionsActivity() {
cameraSwitch.performClick()
}
cameraSwitch.setOnCheckedChangeListener { buttonView, isChecked ->
if (isChecked) mRender?.startRecording() else mRender?.stopRecording()
// if (isChecked) mRender?.startRecording() else mRender?.stopRecording()
}
itemSetting.post {
@ -100,19 +98,21 @@ class HomeActivity : NoOptionsActivity() {
* Set up the camera
*/
private fun setForCamera() {
mRender = RtspSurfaceRender2(mBinding.surface)
mRender?.setRtspUrl(CAMERA_URL)
mRender?.setFrameCallBack(object : IFrameCallBack {
override fun onFrame() {
AppData.setCameraData(true)
changeViewStatus()
closeLoadingDialog()
lifecycleScope.launch {
delay(300)
val client = EasyPlayerClient(this@HomeActivity, mBinding.surface, null) {
if (!AppData.hasCameraData()) {
AppData.setCameraData(true)
changeViewStatus()
closeLoadingDialog()
}
watchCamera(DELAY_TIME_CHECK_CAMERA)
}
})
// The first connection is slow, so allow 10 seconds
watchCamera(DELAY_TIME_OPEN_CAMERA)
showLoadingDialog(R.string.connecting_camera)
client.play(CAMERA_URL)
// The first connection is slow, so allow 10 seconds
watchCamera(DELAY_TIME_OPEN_CAMERA)
showLoadingDialog(R.string.connecting_camera)
}
}
/**
@ -145,7 +145,7 @@ class HomeActivity : NoOptionsActivity() {
if (!isLoadingShowing() && !AppData.hasSensorData()) {
showLoadingDialog(R.string.connecting_camera)
}
mRender?.reStart()
// mRender?.reStart()
watchCamera(DELAY_TIME_OPEN_CAMERA)
} else {
watchCamera(DELAY_TIME_CHECK_CAMERA)
@ -246,18 +246,7 @@ class HomeActivity : NoOptionsActivity() {
WifiHelper.release(this@HomeActivity)
SimHelper.release(this@HomeActivity)
mBinding.root?.removeCallbacks(reStartCamera)
mRender?.onDestoryed()
warnDialog?.dismiss()
Process.killProcess(Process.myPid())
}
override fun onResume() {
super.onResume()
mRender?.onResume()
}
override fun onStop() {
super.onStop()
mRender?.onPause()
}
}

@ -2,6 +2,7 @@ package com.yinuo.safetywatcher.watcher.utils
import android.os.StatFs
import com.lztek.toolkit.Lztek
import org.easydarwin.PushHelper
import java.math.BigInteger
import java.security.MessageDigest

@ -3,7 +3,7 @@
android:layout_width="match_parent"
android:layout_height="match_parent">
<SurfaceView
<TextureView
android:id="@+id/surface"
android:layout_width="match_parent"
android:layout_height="match_parent" />

@ -0,0 +1,61 @@
package com.common.commonlib.utils;
import android.util.Log;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
/**
* Created by liwentian on 17/8/16.
*/
public class LogUtils {
private static final String TAG = "watcher";
public static void v(String msg) {
Log.v(TAG, msg);
}
public static void v(String tag, String msg) {
Log.v(tag, msg);
}
public static void e(String msg) {
Log.e(TAG, msg);
}
public static void e(String tag, String msg) {
Log.e(tag, msg);
}
public static void w(String msg) {
Log.w(TAG, msg);
}
public static void w(String tag, String msg) {
Log.w(tag, msg);
}
public static void e(Throwable e) {
String s = getThrowableString(e);
e(s);
}
private static String getThrowableString(Throwable e) {
Writer writer = new StringWriter();
PrintWriter printWriter = new PrintWriter(writer);
while (e != null) {
e.printStackTrace(printWriter);
e = e.getCause();
}
String text = writer.toString();
printWriter.close();
return text;
}
}

@ -14,6 +14,7 @@ repositories {
dependencies {
implementation fileTree(include: ['*.jar'], dir: 'libs')
implementation project(path: ':library-common')
testImplementation 'junit:junit:4.13.2'
implementation('androidx.core:core:1.10.1')

@ -0,0 +1,97 @@
package org.easydarwin
import android.net.Uri
import com.common.commonlib.CommonApplication
import com.common.commonlib.utils.LogUtils
import org.easydarwin.push.EasyPusher
import org.easydarwin.push.InitCallback
import org.easydarwin.push.Pusher
object PushHelper {
private val mPusher: EasyPusher by lazy {
EasyPusher()
}
private val mApplicationContext = CommonApplication.getContext()
private var mIp: String? = null
private var mPort: String? = null
private var mId: String? = null
private var mInitialized = false
var callback = InitCallback { code ->
var msg = ""
when (code) {
EasyPusher.OnInitPusherCallback.CODE.EASY_ACTIVATE_INVALID_KEY -> msg = "Invalid key"
EasyPusher.OnInitPusherCallback.CODE.EASY_ACTIVATE_SUCCESS -> msg = "Not started"
EasyPusher.OnInitPusherCallback.CODE.EASY_PUSH_STATE_CONNECTING -> msg = "Connecting"
EasyPusher.OnInitPusherCallback.CODE.EASY_PUSH_STATE_CONNECTED -> msg = "Connected"
EasyPusher.OnInitPusherCallback.CODE.EASY_PUSH_STATE_CONNECT_FAILED -> msg = "Connection failed"
EasyPusher.OnInitPusherCallback.CODE.EASY_PUSH_STATE_CONNECT_ABORT -> msg = "Connection aborted"
EasyPusher.OnInitPusherCallback.CODE.EASY_PUSH_STATE_PUSHING -> msg = "Pushing stream"
EasyPusher.OnInitPusherCallback.CODE.EASY_PUSH_STATE_DISCONNECTED -> msg = "Disconnected"
EasyPusher.OnInitPusherCallback.CODE.EASY_ACTIVATE_PLATFORM_ERR -> msg = "Platform mismatch"
EasyPusher.OnInitPusherCallback.CODE.EASY_ACTIVATE_COMPANY_ID_LEN_ERR -> msg = "Licensed vendor mismatch"
EasyPusher.OnInitPusherCallback.CODE.EASY_ACTIVATE_PROCESS_NAME_LEN_ERR -> msg = "Process name length mismatch"
}
LogUtils.v("PushHelper. InitCallback $msg")
}
fun setPushUrl(url: String) {
val mUri = Uri.parse(url)
mIp = mUri.host
mPort = mUri.port.toString()
mId = mUri.path
if (mId?.startsWith("/") == true) {
mId = mId!!.substring(1)
}
}
fun startStream(hevc: Boolean) {
stop()
initHelper(hevc)
}
fun stop() {
mPusher.stop()
mInitialized = false
}
fun pushData(h264: ByteArray, length: Int, timeStamp: Long) {
if (mIp.isNullOrEmpty() || mPort.isNullOrEmpty() || mId.isNullOrEmpty()) {
LogUtils.e("PushHelper error, please setPushUrl before!!")
return
}
if (!mInitialized) {
LogUtils.e("PushHelper error, please init first!!")
return
}
mPusher.push(h264, 0, length, timeStamp, 1)
}
private fun initHelper(hevc: Boolean) {
if (mIp.isNullOrEmpty() || mPort.isNullOrEmpty() || mId.isNullOrEmpty()) {
LogUtils.e("PushHelper error, please setPushUrl first!!")
return
}
mPusher.initPush(mApplicationContext, callback)
mPusher.setMediaInfo(
if (hevc) Pusher.Codec.EASY_SDK_VIDEO_CODEC_H265 else Pusher.Codec.EASY_SDK_VIDEO_CODEC_H264,
24,
Pusher.Codec.EASY_SDK_AUDIO_CODEC_AAC,
1,
8000,
16
)
mPusher.start(mIp, mPort, mId, Pusher.TransType.EASY_RTP_OVER_TCP)
mInitialized = true
}
}
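A minimal usage sketch for the PushHelper object above, using only the functions it declares; the RTSP URL and the source of encoded frames are hypothetical placeholders, not values from this commit.

// Hypothetical call site: URL and frame source are placeholders.
fun pushEncodedFrames(frames: Sequence<Pair<ByteArray, Long>>) {
    PushHelper.setPushUrl("rtsp://192.168.1.100:554/live/ch1") // must be set before streaming
    PushHelper.startStream(hevc = false)                       // false = H.264, true = H.265
    for ((frame, timeStampMs) in frames) {
        PushHelper.pushData(frame, frame.size, timeStampMs)
    }
    PushHelper.stop()
}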

@ -0,0 +1,67 @@
package org.easydarwin
import android.graphics.Bitmap
import android.graphics.Color
import android.text.TextUtils
import org.easydarwin.util.YUVUtils
import java.text.SimpleDateFormat
/**
* Created by John on 2017/2/23.
*/
object TxtOverlay {
// Listener notified when the overlay tip changes
private var mTipChangeListener: (() -> Unit)? = null
// The tip text that was shown last time
private var mLastShowTip = ""
// The tip text waiting to be shown
private var mToDoShowTip = ""
// Called externally to set the watermark text to display
fun setShowTip(string: String) {
mToDoShowTip = string
buildOverlayBitmap()
mTipChangeListener?.invoke()
}
// Time of the last tip update
private var lastTipUpdateTime: Long = 0
// Bitmap generated from the text
private var bmp: Bitmap? = null
// Date format used for the timestamp
private val dateFormat = SimpleDateFormat("yy-MM-dd HH:mm:ss")
fun buildOverlayBitmap(): Bitmap? {
if (TextUtils.isEmpty(mToDoShowTip)) {
return null
}
val currentTimeMillis = System.currentTimeMillis()
// Limit how often the bitmap is rebuilt, for performance
if (TextUtils.isEmpty(mLastShowTip) || mToDoShowTip != mLastShowTip || currentTimeMillis - lastTipUpdateTime > 1000) {
// Record the update time and the previous text
lastTipUpdateTime = currentTimeMillis
mLastShowTip = mToDoShowTip
// // Recycle the old bitmap
// bmp?.recycle()
// Render the text into a bitmap
bmp = YUVUtils.generateBitmap(
dateFormat.format(lastTipUpdateTime) + "@" + mToDoShowTip, 40, Color.WHITE
)
// Scale and rotate the bitmap
// bmp = YUVUtils.rotateImage(bmp, 0);
}
return bmp
}
fun getOverlayBitmap(): Bitmap? {
return bmp
}
fun setTipChangeListener(onChange: () -> Unit) {
mTipChangeListener = onChange
}
}
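A short sketch of how the TxtOverlay singleton above could be driven; the tip text and the listener body are illustrative only.

// Illustrative wiring: push new watermark text and react when it changes.
fun wireOverlay() {
    TxtOverlay.setTipChangeListener {
        val overlay = TxtOverlay.getOverlayBitmap()
        LogUtils.v("overlay updated: ${overlay?.width}x${overlay?.height}")
    }
    TxtOverlay.setShowTip("CH4 1.2%VOL") // example gas reading used as the watermark
}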

@ -1,129 +0,0 @@
package org.easydarwin.push;
import android.content.Context;
import android.util.Log;
import org.easydarwin.muxer.EasyMuxer;
import org.easydarwin.sw.JNIUtil;
import org.easydarwin.sw.X264Encoder;
import java.util.concurrent.ArrayBlockingQueue;
/**
* Created by apple on 2017/5/13.
*/
public class SWConsumer extends Thread implements VideoConsumer {
private static final String TAG = "SWConsumer";
private int mHeight;
private int mWidth;
private X264Encoder x264;
private final Pusher mPusher;
private volatile boolean mVideoStarted;
public SWConsumer(Context context, Pusher pusher){
mPusher = pusher;
}
@Override
public void onVideoStart(int width, int height) {
this.mWidth = width;
this.mHeight = height;
x264 = new X264Encoder();
int bitrate = (int) (mWidth*mHeight*20*2*0.07f);
x264.create(width, height, 20, bitrate/500);
mVideoStarted = true;
start();
}
class TimedBuffer {
byte[] buffer;
long time;
public TimedBuffer(byte[] data) {
buffer = data;
time = System.currentTimeMillis();
}
}
private ArrayBlockingQueue<TimedBuffer> yuvs = new ArrayBlockingQueue<TimedBuffer>(2);
private ArrayBlockingQueue<byte[]> yuv_caches = new ArrayBlockingQueue<byte[]>(10);
@Override
public void run(){
byte[]h264 = new byte[mWidth*mHeight*3/2];
byte[] keyFrm = new byte[1];
int []outLen = new int[1];
do {
try {
int r = 0;
TimedBuffer tb = yuvs.take();
byte[] data = tb.buffer;
long begin = System.currentTimeMillis();
r = x264.encode(data, 0, h264, 0, outLen, keyFrm);
if (r > 0) {
Log.i(TAG, String.format("encode spend:%d ms. keyFrm:%d", System.currentTimeMillis() - begin, keyFrm[0]));
// newBuf = new byte[outLen[0]];
// System.arraycopy(h264, 0, newBuf, 0, newBuf.length);
}
keyFrm[0] = 0;
yuv_caches.offer(data);
mPusher.push(h264, 0, outLen[0], tb.time, 1);
} catch (InterruptedException e) {
e.printStackTrace();
}
}while (mVideoStarted);
}
final int millisPerframe = 1000/20;
long lastPush = 0;
@Override
public int onVideo(byte[] data, int format) {
try {
if (lastPush == 0) {
lastPush = System.currentTimeMillis();
}
long time = System.currentTimeMillis() - lastPush;
if (time >= 0) {
time = millisPerframe - time;
if (time > 0) Thread.sleep(time / 2);
}
byte[] buffer = yuv_caches.poll();
if (buffer == null || buffer.length != data.length) {
buffer = new byte[data.length];
}
System.arraycopy(data, 0, buffer, 0, data.length);
JNIUtil.yuvConvert(buffer, mWidth, mHeight, 4);
yuvs.offer(new TimedBuffer(buffer));
if (time > 0) Thread.sleep(time / 2);
lastPush = System.currentTimeMillis();
}catch (InterruptedException ex){
ex.printStackTrace();
}
return 0;
}
@Override
public void onVideoStop() {
do {
mVideoStarted = false;
try {
interrupt();
join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}while (isAlive());
if (x264 != null) {
x264.close();
}
x264 = null;
}
@Override
public void setMuxer(EasyMuxer muxer) {
}
}

@ -1,81 +0,0 @@
package org.easydarwin.sw;
/**
*/
public class JNIUtil {
static {
System.loadLibrary("Utils");
}
/**
* YUV = 411 U V
*
* @param buffer
* @param width
* @param height
*/
public static void yV12ToYUV420P(byte[] buffer, int width, int height) {
callMethod("YV12ToYUV420P", null, buffer, width, height);
}
/**
* YU+V = 42,UV
*
* @param buffer
* @param width
* @param height
*/
public static void nV21To420SP(byte[] buffer, int width, int height) {
callMethod("NV21To420SP", null, buffer, width, height);
}
/**
* 1
*
* @param data
* @param offset
* @param width
* @param height
* @param degree
*/
public static void rotateMatrix(byte[] data, int offset, int width, int height, int degree) {
callMethod("RotateByteMatrix", null, data, offset, width, height, degree);
}
/**
* 2
*
* @param data
* @param offset
* @param width
* @param height
* @param degree
*/
public static void rotateShortMatrix(byte[] data, int offset, int width, int height, int degree) {
callMethod("RotateShortMatrix", null, data, offset, width, height, degree);
}
private static native void callMethod(String methodName, Object[] returnValue, Object... params);
/**
* 0 NULL,
* 1 yuv_to_yvu,
* 2 yuv_to_yuvuv,
* 3 yuv_to_yvuvu,
* 4 yuvuv_to_yuv,
* 5 yuvuv_to_yvu,
* 6 yuvuv_to_yvuvu,
*
* @param data
* @param width
* @param height
* @param mode
*/
public static native void yuvConvert(byte[] data, int width, int height, int mode);
}

@ -1,113 +0,0 @@
package org.easydarwin.sw;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.text.TextUtils;
import org.easydarwin.util.YUVUtils;
import java.io.File;
import java.text.SimpleDateFormat;
/**
* Created by John on 2017/2/23.
*/
public class TxtOverlay {
static {
System.loadLibrary("TxtOverlay");
}
private final Context context;
private final static int init_startX = 10;
private final static int init_startY = 50;
int startY = init_startX;//Y position of the watermark
int startX = init_startY;//X position of the watermark
String mTip = "";
long lastTipUpdateTime = 0;
Bitmap bmp;
byte[] mark;
SimpleDateFormat dateFormat = new SimpleDateFormat("yy-MM-dd HH:mm:ss");
public TxtOverlay(Context context) {
this.context = context;
}
private long ctx;
public void init(int width, int height, String fonts) {
if (TextUtils.isEmpty(fonts)) {
throw new IllegalArgumentException("the font file must be valid!");
}
if (!new File(fonts).exists()) {
throw new IllegalArgumentException("the font file must be exists!");
}
ctx = txtOverlayInit(width, height, fonts);
}
public void overlay(byte[] data,
String txt) {
if (ctx == 0) return;
txtOverlay(ctx, data, txt);
}
public void javaOverlay(byte[] data, int width, int height, int cameraRotationOffset,
String txt) {
if (TextUtils.isEmpty(txt)) {
return;
}
long currentTimeMillis = System.currentTimeMillis();
// Limit how often the bitmap is regenerated, for performance
if (TextUtils.isEmpty(mTip) || (!txt.equals(mTip) || currentTimeMillis - lastTipUpdateTime > 1000)) {
// Record the update time and the previous text
lastTipUpdateTime = currentTimeMillis;
mTip = txt;
// Render the text into a bitmap
bmp = YUVUtils.generateBitmap(dateFormat.format(lastTipUpdateTime) + "@" + txt, 16, Color.WHITE);
// Scale and rotate the bitmap
bmp = YUVUtils.rotateImage(bmp, cameraRotationOffset);
//Convert to YUV
mark = YUVUtils.getYUVByBitmap(bmp);
}
// Adjust the position according to the rotation angle
int bmpWidth = bmp.getWidth();
int bmpHeight = bmp.getHeight();
if (cameraRotationOffset == 90) {
startY = height - init_startY - bmpHeight;
startX = init_startX;
} else if (cameraRotationOffset == 270) {
startY = init_startY;
startX = width - init_startX - bmpWidth;
}
// Overwrite the YUV data
int j = 0;
for (int i = startY; i < bmpHeight + startY; i++) {
for (int c = 0; c < bmpWidth; c++) {
//Skip the black edges of the PNG watermark
if (mark[j * bmpWidth + c] != 0x10 && mark[j * bmpWidth + c] != 0x80 && mark[j * bmpWidth + c] != 0xeb) {
System.arraycopy(mark, j * bmpWidth + c, data, startX + i * width + c, 1);
}
}
j++;
}
}
public void release() {
if (ctx == 0) return;
txtOverlayRelease(ctx);
ctx = 0;
}
private static native long txtOverlayInit(int width, int height, String fonts);
private static native void txtOverlay(long ctx, byte[] data, String txt);
private static native void txtOverlayRelease(long ctx);
}

@ -0,0 +1 @@
/build

@ -0,0 +1,35 @@
apply plugin: 'com.android.library'
android {
compileSdkVersion 26
buildToolsVersion '28.0.3'
defaultConfig {
minSdkVersion 16
targetSdkVersion 26
versionCode 1
versionName "1.0"
consumerProguardFiles 'proguard-rules.pro'
buildConfigField 'boolean', 'MEDIA_DEBUG', 'false'
}
buildTypes {
release {
// minifyEnabled true
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'library.pro'
}
}
}
repositories {
flatDir {
dirs 'libs'
}
mavenCentral()
google()
}
dependencies {
implementation fileTree(include: ['*.jar'], dir: 'libs')
implementation 'androidx.annotation:annotation-jvm:+'
implementation 'androidx.core:core:1.10.1'
}

@ -0,0 +1,81 @@
#
# This ProGuard configuration file illustrates how to process a program
# library, such that it remains usable as a library.
# Usage:
# java -jar proguard.jar @library.pro
#
# Specify the input jars, output jars, and library jars.
# In this case, the input jar is the program library that we want to process.
# Save the obfuscation mapping to a file, so we can de-obfuscate any stack
# traces later on. Keep a fixed source file attribute and all line number
# tables to get line numbers in the stack traces.
# You can comment this out if you're not interested in stack traces.
-keepparameternames
-renamesourcefileattribute SourceFile
-keepattributes Exceptions,InnerClasses,Signature,Deprecated,
SourceFile,LineNumberTable,EnclosingMethod
# Preserve all annotations.
-keepattributes *Annotation*
# Preserve all public classes, and their public and protected fields and
# methods.
-keep public class * {
public protected *;
}
# Preserve all .class method names.
-keepclassmembernames class * {
java.lang.Class class$(java.lang.String);
java.lang.Class class$(java.lang.String, boolean);
}
# Preserve all native method names and the names of their classes.
-keepclasseswithmembernames class * {
native <methods>;
}
# Preserve the special static methods that are required in all enumeration
# classes.
-keepclassmembers class * extends java.lang.Enum {
public static **[] values();
public static ** valueOf(java.lang.String);
}
# Explicitly preserve all serialization members. The Serializable interface
# is only a marker interface, so it wouldn't save them.
# You can comment this out if your library doesn't use serialization.
# If your code contains serializable classes that have to be backward
# compatible, please refer to the manual.
-keepclassmembers class * implements java.io.Serializable {
static final long serialVersionUID;
static final java.io.ObjectStreamField[] serialPersistentFields;
private void writeObject(java.io.ObjectOutputStream);
private void readObject(java.io.ObjectInputStream);
java.lang.Object writeReplace();
java.lang.Object readResolve();
}
# Your library may contain more items that need to be preserved;
# typically classes that are dynamically created using Class.forName:
# -keep public class mypackage.MyClass
# -keep public interface mypackage.MyInterface
# -keep public class * implements mypackage.MyInterface
-keepclassmembers class org.easydarwin.video.EasyPlayerClient {
public *;
}
-keepclassmembers class org.easydarwin.video.Client {
private static void onRTSPSourceCallBack(int,int,int,byte[],byte[]);
}

@ -0,0 +1,26 @@
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in D:\software\adt-bundle-windows-x86_64-20140702\sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Ignore warnings
-ignorewarning
-keepclassmembers class org.easydarwin.video.EasyPlayerClient {
public *;
}
-keepclassmembers class org.easydarwin.video.Client$FrameInfo{
*;
}

@ -0,0 +1,19 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.easydarwin.video">
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.CHANGE_WIFI_MULTICAST_STATE"/>
<application
android:allowBackup="true"
android:label="@string/app_name"
android:supportsRtl="true">
</application>
</manifest>

@ -0,0 +1,17 @@
package org.easydarwin.audio;
/**
* Created by John on 2016/3/18.
*/
public class AudioCodec {
static {
System.loadLibrary("proffmpeg");
System.loadLibrary("AudioCodecer");
}
public static native long create(int codec, int sample_rate, int channels, int sample_bit);
public static native int decode(long handle, byte[] buffer, int offset, int length, byte[] pcm, int[] outLen);
public static native void close(long handle);
}
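The class only declares native entry points, so the sketch below is a hedged guess at how the handle-based API is driven; the codec id, buffer size, and the assumption that a non-negative return value means success are not taken from this commit.

// Assumed decode flow around the native AudioCodec bindings.
fun decodeOnePacket(packet: ByteArray): ByteArray? {
    val handle = AudioCodec.create(/* codec = */ 86018, 8000, 1, 16) // placeholder codec id
    val pcm = ByteArray(8 * 1024)   // output buffer; size is a guess
    val outLen = IntArray(1)
    val ret = AudioCodec.decode(handle, packet, 0, packet.size, pcm, outLen)
    AudioCodec.close(handle)
    return if (ret >= 0 && outLen[0] > 0) pcm.copyOf(outLen[0]) else null
}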

@ -0,0 +1,100 @@
package org.easydarwin.audio;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;
import org.easydarwin.video.EasyMuxer;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* An EasyMuxer variant that also accepts raw PCM and encodes it to AAC before muxing.
*/
public class EasyAACMuxer extends EasyMuxer {
MediaCodec mMediaCodec;
String TAG = "EasyAACMuxer";
protected MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
protected ByteBuffer[] mBuffers = null;
private MediaFormat mAudioFormat;
public EasyAACMuxer(String path, boolean hasAudio, long durationMillis) {
super(path, hasAudio, durationMillis);
}
@Override
public synchronized void addTrack(MediaFormat format, boolean isVideo) {
super.addTrack(format, isVideo);
if (!isVideo){
mAudioFormat = format;
}
}
public synchronized void pumpPCMStream(byte []pcm, int length, long timeUs) throws IOException {
if (mMediaCodec == null) {// Start the AAC encoder; MediaCodec is used for the encoding here
if (mAudioFormat == null) return;
mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
Log.i(TAG, String.valueOf(mAudioFormat));
mAudioFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
mAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,MediaCodecInfo.CodecProfileLevel.AACObjectLC);
mAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 16000);
// mAudioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 320);
mMediaCodec.configure(mAudioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
mBuffers = mMediaCodec.getOutputBuffers();
}
int index = 0;
// Encode the PCM into AAC
do {
index = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 1000);
if (index >= 0) {
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
continue;
}
if (mBufferInfo.presentationTimeUs == 0){
continue;
}
if (VERBOSE) Log.d(TAG,String.format("dequeueOutputBuffer data length:%d,tmUS:%d", mBufferInfo.size, mBufferInfo.presentationTimeUs));
ByteBuffer outputBuffer = mBuffers[index];
// OK, encoding succeeded. Write the AAC data into the muxer.
pumpStream(outputBuffer, mBufferInfo, false);
mMediaCodec.releaseOutputBuffer(index, false);
} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
mBuffers = mMediaCodec.getOutputBuffers();
} else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
Log.v(TAG, "output format changed...");
MediaFormat newFormat = mMediaCodec.getOutputFormat();
Log.v(TAG, "output format changed..." + newFormat);
} else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
Log.v(TAG, "No buffer available...");
} else {
Log.e(TAG, "Message: " + index);
}
} while (index >= 0 && !Thread.currentThread().isInterrupted());
final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
do {
index = mMediaCodec.dequeueInputBuffer(1000);
if (index >= 0) {
inputBuffers[index].clear();
inputBuffers[index].put(pcm, 0, length);
if (VERBOSE) Log.d(TAG,String.format("queueInputBuffer pcm data length:%d,tmUS:%d", length, timeUs));
mMediaCodec.queueInputBuffer(index, 0, length, timeUs, 0);
}
}
while (!Thread.currentThread().isInterrupted() && index < 0);
}
@Override
public synchronized void release() {
if (mMediaCodec != null) mMediaCodec.release();
mMediaCodec = null;
super.release();
}
}
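A hedged sketch of feeding the muxer above with raw PCM; the file path, the audio MediaFormat, and the 20 ms frame spacing are illustrative assumptions.

// Illustrative use of EasyAACMuxer: register the audio format, then pump PCM.
fun muxPcm(path: String, audioFormat: android.media.MediaFormat, pcmFrames: List<ByteArray>) {
    val muxer = EasyAACMuxer(path, /* hasAudio = */ true, /* durationMillis = */ 60_000L)
    muxer.addTrack(audioFormat, /* isVideo = */ false)
    var timeUs = 0L
    for (pcm in pcmFrames) {
        muxer.pumpPCMStream(pcm, pcm.size, timeUs) // PCM is AAC-encoded internally
        timeUs += 20_000                           // assumed 20 ms per PCM frame
    }
    muxer.release()
}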

@ -0,0 +1,239 @@
package org.easydarwin.player;
import android.graphics.SurfaceTexture;
import android.net.Uri;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
import androidx.annotation.IntDef;
import org.easydarwin.video.EasyPlayerClient;
/**
* Created by apple on 2017/9/9.
*/
public class EasyPlayer {
private static final java.lang.String LOG_TAG = "EasyPlayer";
private final int mTransport;
private final String mPath;
private Surface surface;
private EasyPlayerClient mRTSPClient;
private static class ComponentListener implements SurfaceHolder.Callback, TextureView.SurfaceTextureListener {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
}
public static class EasyPlayerFactory {
private Uri mURI;
private int mTransportMode = TRANSPORT_MODE_TCP;
public static final int TRANSPORT_MODE_TCP = 1;
public static final int SETRANSPORT_MODE_UDP = 2;
private boolean autoPlayWhenReady;
// Transport type annotation (TCP or UDP)
@IntDef({TRANSPORT_MODE_TCP, SETRANSPORT_MODE_UDP})
public @interface TRANSPORT_MODE {
}
public EasyPlayerFactory setUri(Uri uri) {
String scheme = uri.getScheme();
if (!"rtsp".equalsIgnoreCase(scheme)) {
throw new IllegalArgumentException("only support rtsp stream.");
}
mURI = uri;
return this;
}
public EasyPlayerFactory setPath(String path) {
setUri(Uri.parse(path));
return this;
}
public EasyPlayerFactory setAutoPlayWhenReady(boolean autoPlayWhenReady) {
this.autoPlayWhenReady = autoPlayWhenReady;
return this;
}
public EasyPlayerFactory setTransportMode(@TRANSPORT_MODE int transport) {
mTransportMode = transport;
return this;
}
public EasyPlayer build() {
if (mURI == null) throw new NullPointerException("uri should not be null!");
return new EasyPlayer(mURI.getPath(), mTransportMode);
}
}
private EasyPlayer(String path, @EasyPlayerFactory.TRANSPORT_MODE int transport) {
mPath = path;
mTransport = transport;
}
public void create() {
}
public void destroy() {
removeSurfaceCallbacks();
}
private TextureView textureView;
private SurfaceHolder surfaceHolder;
private final ComponentListener componentListener = new ComponentListener() {
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
super.surfaceCreated(surfaceHolder);
surface = surfaceHolder.getSurface();
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
super.surfaceDestroyed(surfaceHolder);
surface = null;
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1) {
super.onSurfaceTextureAvailable(surfaceTexture, i, i1);
surface = new Surface(surfaceTexture);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
surface = null;
return true;
}
};
private void removeSurfaceCallbacks() {
if (textureView != null) {
if (textureView.getSurfaceTextureListener() != componentListener) {
Log.w(LOG_TAG, "SurfaceTextureListener already unset or replaced.");
} else {
textureView.setSurfaceTextureListener(null);
}
textureView = null;
}
if (surfaceHolder != null) {
surfaceHolder.removeCallback(componentListener);
surfaceHolder = null;
}
}
private void setVideoSurfaceInternal(Surface surface) {
this.surface = surface;
}
/**
* Sets the {@link SurfaceHolder} that holds the {@link Surface} onto which video will be
* rendered. The player will track the lifecycle of the surface automatically.
*
* @param surfaceHolder The surface holder.
*/
public void setVideoSurfaceHolder(SurfaceHolder surfaceHolder) {
removeSurfaceCallbacks();
this.surfaceHolder = surfaceHolder;
if (surfaceHolder == null) {
setVideoSurfaceInternal(null);
} else {
surfaceHolder.addCallback(componentListener);
Surface surface = surfaceHolder.getSurface();
setVideoSurfaceInternal(surface != null && surface.isValid() ? surface : null);
}
}
/**
* Clears the {@link SurfaceHolder} that holds the {@link Surface} onto which video is being
* rendered if it matches the one passed. Else does nothing.
*
* @param surfaceHolder The surface holder to clear.
*/
public void clearVideoSurfaceHolder(SurfaceHolder surfaceHolder) {
if (surfaceHolder != null && surfaceHolder == this.surfaceHolder) {
setVideoSurfaceHolder(null);
}
}
/**
* Sets the {@link SurfaceView} onto which video will be rendered. The player will track the
* lifecycle of the surface automatically.
*
* @param surfaceView The surface view.
*/
public void setVideoSurfaceView(SurfaceView surfaceView) {
setVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder());
}
/**
* Sets the {@link TextureView} onto which video will be rendered. The player will track the
* lifecycle of the surface automatically.
*
* @param textureView The texture view.
*/
public void setVideoTextureView(TextureView textureView) {
removeSurfaceCallbacks();
this.textureView = textureView;
if (textureView == null) {
setVideoSurfaceInternal(null);
} else {
if (textureView.getSurfaceTextureListener() != null) {
Log.w(LOG_TAG, "Replacing existing SurfaceTextureListener.");
}
textureView.setSurfaceTextureListener(componentListener);
SurfaceTexture surfaceTexture = textureView.isAvailable() ? textureView.getSurfaceTexture()
: null;
setVideoSurfaceInternal(surfaceTexture == null ? null : new Surface(surfaceTexture));
}
}
}
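The nested EasyPlayerFactory is the intended construction path; a minimal hedged sketch follows (the RTSP URL and the TextureView come from the caller, and playback wiring beyond surface tracking is not implemented in this file yet).

// Hypothetical construction via the factory shown above.
fun attachPlayer(textureView: android.view.TextureView): EasyPlayer {
    val player = EasyPlayer.EasyPlayerFactory()
        .setPath("rtsp://192.168.1.64/ch1") // placeholder URL; only rtsp:// is accepted
        .setTransportMode(EasyPlayer.EasyPlayerFactory.TRANSPORT_MODE_TCP)
        .setAutoPlayWhenReady(true)
        .build()
    player.create()
    player.setVideoTextureView(textureView) // player tracks the surface lifecycle
    return player
}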

@ -0,0 +1,99 @@
package org.easydarwin.sw;
/**
*/
public class JNIUtil {
static {
System.loadLibrary("yuv_android");
}
/**
* Rotates a byte matrix by the given degree.
*
* @param data
* @param offset
* @param width
* @param height
* @param degree
*/
public static void rotateMatrix(byte[] data, int offset, int width, int height, int degree) {
callMethod("RotateByteMatrix", null, data, offset, width, height, degree);
}
/**
* Rotates a short matrix by the given degree.
*
* @param data
* @param offset
* @param width
* @param height
* @param degree
*/
public static void rotateShortMatrix(byte[] data, int offset, int width, int height, int degree) {
callMethod("RotateShortMatrix", null, data, offset, width, height, degree);
}
private static native void callMethod(String methodName, Object[] returnValue, Object... params);
/**
* 0 NULL,
* 1 yuv_to_yvu,
* 2 yuv_to_yuvuv,
* 3 yuv_to_yvuvu,
* 4 yuvuv_to_yuv,
* 5 yuvuv_to_yvu,
* 6 yuvuv_to_yvuvu,
*
* @param data
* @param width
* @param height
* @param mode
*/
public static native void yuvConvert(byte[] data, int width, int height, int mode);
/**
* Convert camera sample to I420 with cropping, rotation and vertical flip.
*
* @param src
* @param dst
* @param width
* @param height
* @param cropX "crop_x" and "crop_y" are starting position for cropping.
* To center, crop_x = (src_width - dst_width) / 2
* crop_y = (src_height - dst_height) / 2
* @param cropY "crop_x" and "crop_y" are starting position for cropping.
* To center, crop_x = (src_width - dst_width) / 2
* crop_y = (src_height - dst_height) / 2
* @param cropWidth
* @param cropHeight
* @param rotation "rotation" can be 0, 90, 180 or 270.
* @param mode 0:420,1:YV12,2:NV21,3:NV12
*/
public static native void ConvertToI420(byte[] src, byte[] dst, int width, int height, int cropX, int cropY, int cropWidth, int cropHeight, int rotation, int mode);
/**
* Convert camera sample to I420 with cropping, rotation and vertical flip.
*
* @param src
* @param dst
* @param width
* @param height
* @param mode 0:420,1:YV12,2:NV21,3:NV12
*/
public static native void ConvertFromI420(byte[] src, byte[] dst, int width, int height, int mode);
/**
* Scales an I420 frame from (width, height) to (dstWidth, dstHeight).
*
* @param src
* @param dst
* @param width
* @param height
* @param dstWidth
* @param dstHeight
* @param mode 0:Point sample; Fastest.<p>1:Filter horizontally only.<p>2:Faster than box, but lower quality scaling down.<p>3:Highest quality.
*/
public static native void I420Scale(byte[] src, byte[] dst, int width, int height, int dstWidth, int dstHeight, int mode);
}
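A hedged example of the ConvertToI420 entry point above for a full, uncropped NV21 camera frame rotated 90 degrees; the mode value 2 follows the NV21 mapping given in the parameter comment.

// Convert a full NV21 preview frame to I420 with 90-degree rotation.
fun nv21ToI420Rotated(src: ByteArray, width: Int, height: Int): ByteArray {
    val dst = ByteArray(width * height * 3 / 2)
    JNIUtil.ConvertToI420(
        src, dst, width, height,
        /* cropX = */ 0, /* cropY = */ 0,            // no cropping
        /* cropWidth = */ width, /* cropHeight = */ height,
        /* rotation = */ 90,
        /* mode = */ 2                               // 2 = NV21 per the Javadoc above
    )
    return dst
}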

@ -0,0 +1,354 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.easydarwin.util;
import android.media.AudioFormat;
import android.media.MediaCodec;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.UUID;
/**
* Defines constants used by the library.
*/
public final class C {
private C() {}
/**
* Special constant representing a time corresponding to the end of a source. Suitable for use in
* any time base.
*/
public static final long TIME_END_OF_SOURCE = Long.MIN_VALUE;
/**
* Special constant representing an unset or unknown time or duration. Suitable for use in any
* time base.
*/
public static final long TIME_UNSET = Long.MIN_VALUE + 1;
/**
* Represents an unset or unknown index.
*/
public static final int INDEX_UNSET = -1;
/**
* Represents an unset or unknown position.
*/
public static final int POSITION_UNSET = -1;
/**
* Represents an unset or unknown length.
*/
public static final int LENGTH_UNSET = -1;
/**
* The number of microseconds in one second.
*/
public static final long MICROS_PER_SECOND = 1000000L;
/**
* The number of nanoseconds in one second.
*/
public static final long NANOS_PER_SECOND = 1000000000L;
/**
* The name of the UTF-8 charset.
*/
public static final String UTF8_NAME = "UTF-8";
/**
* @see MediaCodec#CRYPTO_MODE_UNENCRYPTED
*/
@SuppressWarnings("InlinedApi")
public static final int CRYPTO_MODE_UNENCRYPTED = MediaCodec.CRYPTO_MODE_UNENCRYPTED;
/**
* @see MediaCodec#CRYPTO_MODE_AES_CTR
*/
@SuppressWarnings("InlinedApi")
public static final int CRYPTO_MODE_AES_CTR = MediaCodec.CRYPTO_MODE_AES_CTR;
/**
* @see AudioFormat#ENCODING_INVALID
*/
public static final int ENCODING_INVALID = AudioFormat.ENCODING_INVALID;
/**
* @see AudioFormat#ENCODING_PCM_8BIT
*/
public static final int ENCODING_PCM_8BIT = AudioFormat.ENCODING_PCM_8BIT;
/**
* @see AudioFormat#ENCODING_PCM_16BIT
*/
public static final int ENCODING_PCM_16BIT = AudioFormat.ENCODING_PCM_16BIT;
/**
* PCM encoding with 24 bits per sample.
*/
public static final int ENCODING_PCM_24BIT = 0x80000000;
/**
* PCM encoding with 32 bits per sample.
*/
public static final int ENCODING_PCM_32BIT = 0x40000000;
/**
* @see AudioFormat#ENCODING_AC3
*/
@SuppressWarnings("InlinedApi")
public static final int ENCODING_AC3 = AudioFormat.ENCODING_AC3;
/**
* @see AudioFormat#ENCODING_E_AC3
*/
@SuppressWarnings("InlinedApi")
public static final int ENCODING_E_AC3 = AudioFormat.ENCODING_E_AC3;
/**
* @see AudioFormat#ENCODING_DTS
*/
@SuppressWarnings("InlinedApi")
public static final int ENCODING_DTS = AudioFormat.ENCODING_DTS;
/**
* @see AudioFormat#ENCODING_DTS_HD
*/
@SuppressWarnings("InlinedApi")
public static final int ENCODING_DTS_HD = AudioFormat.ENCODING_DTS_HD;
/**
* Indicates that a buffer holds a synchronization sample.
*/
@SuppressWarnings("InlinedApi")
public static final int BUFFER_FLAG_KEY_FRAME = MediaCodec.BUFFER_FLAG_KEY_FRAME;
/**
* Flag for empty buffers that signal that the end of the stream was reached.
*/
@SuppressWarnings("InlinedApi")
public static final int BUFFER_FLAG_END_OF_STREAM = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
/**
* Indicates that a buffer is (at least partially) encrypted.
*/
public static final int BUFFER_FLAG_ENCRYPTED = 0x40000000;
/**
* Indicates that a buffer should be decoded but not rendered.
*/
public static final int BUFFER_FLAG_DECODE_ONLY = 0x80000000;
/**
* Indicates that the track should be selected if user preferences do not state otherwise.
*/
public static final int SELECTION_FLAG_DEFAULT = 1;
/**
* Indicates that the track must be displayed. Only applies to text tracks.
*/
public static final int SELECTION_FLAG_FORCED = 2;
/**
* Indicates that the player may choose to play the track in absence of an explicit user
* preference.
*/
public static final int SELECTION_FLAG_AUTOSELECT = 4;
/**
* A return value for methods where the end of an input was encountered.
*/
public static final int RESULT_END_OF_INPUT = -1;
/**
* A return value for methods where the length of parsed data exceeds the maximum length allowed.
*/
public static final int RESULT_MAX_LENGTH_EXCEEDED = -2;
/**
* A return value for methods where nothing was read.
*/
public static final int RESULT_NOTHING_READ = -3;
/**
* A return value for methods where a buffer was read.
*/
public static final int RESULT_BUFFER_READ = -4;
/**
* A return value for methods where a format was read.
*/
public static final int RESULT_FORMAT_READ = -5;
/**
* A data type constant for data of unknown or unspecified type.
*/
public static final int DATA_TYPE_UNKNOWN = 0;
/**
* A data type constant for media, typically containing media samples.
*/
public static final int DATA_TYPE_MEDIA = 1;
/**
* A data type constant for media, typically containing only initialization data.
*/
public static final int DATA_TYPE_MEDIA_INITIALIZATION = 2;
/**
* A data type constant for drm or encryption data.
*/
public static final int DATA_TYPE_DRM = 3;
/**
* A data type constant for a manifest file.
*/
public static final int DATA_TYPE_MANIFEST = 4;
/**
* A data type constant for time synchronization data.
*/
public static final int DATA_TYPE_TIME_SYNCHRONIZATION = 5;
/**
* Applications or extensions may define custom {@code DATA_TYPE_*} constants greater than or
* equal to this value.
*/
public static final int DATA_TYPE_CUSTOM_BASE = 10000;
/**
* A type constant for tracks of unknown type.
*/
public static final int TRACK_TYPE_UNKNOWN = -1;
/**
* A type constant for tracks of some default type, where the type itself is unknown.
*/
public static final int TRACK_TYPE_DEFAULT = 0;
/**
* A type constant for audio tracks.
*/
public static final int TRACK_TYPE_AUDIO = 1;
/**
* A type constant for video tracks.
*/
public static final int TRACK_TYPE_VIDEO = 2;
/**
* A type constant for text tracks.
*/
public static final int TRACK_TYPE_TEXT = 3;
/**
* A type constant for metadata tracks.
*/
public static final int TRACK_TYPE_METADATA = 4;
/**
* Applications or extensions may define custom {@code TRACK_TYPE_*} constants greater than or
* equal to this value.
*/
public static final int TRACK_TYPE_CUSTOM_BASE = 10000;
/**
* A selection reason constant for selections whose reasons are unknown or unspecified.
*/
public static final int SELECTION_REASON_UNKNOWN = 0;
/**
* A selection reason constant for an initial track selection.
*/
public static final int SELECTION_REASON_INITIAL = 1;
/**
* A selection reason constant for a manual (i.e. user initiated) track selection.
*/
public static final int SELECTION_REASON_MANUAL = 2;
/**
* A selection reason constant for an adaptive track selection.
*/
public static final int SELECTION_REASON_ADAPTIVE = 3;
/**
* A selection reason constant for a trick play track selection.
*/
public static final int SELECTION_REASON_TRICK_PLAY = 4;
/**
* Applications or extensions may define custom {@code SELECTION_REASON_*} constants greater than
* or equal to this value.
*/
public static final int SELECTION_REASON_CUSTOM_BASE = 10000;
/**
* A default size in bytes for an individual allocation that forms part of a larger buffer.
*/
public static final int DEFAULT_BUFFER_SEGMENT_SIZE = 64 * 1024;
/**
* A default size in bytes for a video buffer.
*/
public static final int DEFAULT_VIDEO_BUFFER_SIZE = 200 * DEFAULT_BUFFER_SEGMENT_SIZE;
/**
* A default size in bytes for an audio buffer.
*/
public static final int DEFAULT_AUDIO_BUFFER_SIZE = 54 * DEFAULT_BUFFER_SEGMENT_SIZE;
/**
* A default size in bytes for a text buffer.
*/
public static final int DEFAULT_TEXT_BUFFER_SIZE = 2 * DEFAULT_BUFFER_SEGMENT_SIZE;
/**
* A default size in bytes for a metadata buffer.
*/
public static final int DEFAULT_METADATA_BUFFER_SIZE = 2 * DEFAULT_BUFFER_SEGMENT_SIZE;
/**
* A default size in bytes for a muxed buffer (e.g. containing video, audio and text).
*/
public static final int DEFAULT_MUXED_BUFFER_SIZE = DEFAULT_VIDEO_BUFFER_SIZE
+ DEFAULT_AUDIO_BUFFER_SIZE + DEFAULT_TEXT_BUFFER_SIZE;
/**
* The Nil UUID as defined by
* <a href="https://tools.ietf.org/html/rfc4122#section-4.1.7">RFC4122</a>.
*/
public static final UUID UUID_NIL = new UUID(0L, 0L);
/**
* UUID for the Widevine DRM scheme.
* <p></p>
* Widevine is supported on Android devices running Android 4.3 (API Level 18) and up.
*/
public static final UUID WIDEVINE_UUID = new UUID(0xEDEF8BA979D64ACEL, 0xA3C827DCD51D21EDL);
/**
* UUID for the PlayReady DRM scheme.
* <p>
* PlayReady is supported on all AndroidTV devices. Note that most other Android devices do not
* provide PlayReady support.
*/
public static final UUID PLAYREADY_UUID = new UUID(0x9A04F07998404286L, 0xAB92E65BE0885F95L);
/**
* Indicates Monoscopic stereo layout, used with 360/3D/VR videos.
*/
public static final int STEREO_MODE_MONO = 0;
/**
* Indicates Top-Bottom stereo layout, used with 360/3D/VR videos.
*/
public static final int STEREO_MODE_TOP_BOTTOM = 1;
/**
* Indicates Left-Right stereo layout, used with 360/3D/VR videos.
*/
public static final int STEREO_MODE_LEFT_RIGHT = 2;
/**
* Converts a time in microseconds to the corresponding time in milliseconds, preserving
* {@link #TIME_UNSET} values.
*
* @param timeUs The time in microseconds.
* @return The corresponding time in milliseconds.
*/
public static long usToMs(long timeUs) {
return timeUs == TIME_UNSET ? TIME_UNSET : (timeUs / 1000);
}
/**
* Converts a time in milliseconds to the corresponding time in microseconds, preserving
* {@link #TIME_UNSET} values.
*
* @param timeMs The time in milliseconds.
* @return The corresponding time in microseconds.
*/
public static long msToUs(long timeMs) {
return timeMs == TIME_UNSET ? TIME_UNSET : (timeMs * 1000);
}
}

@ -0,0 +1,282 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.easydarwin.util;
import android.media.MediaCodecInfo;
import android.util.Pair;
import java.util.ArrayList;
import java.util.List;
/**
* Provides static utility methods for manipulating various types of codec specific data.
*/
public final class CodecSpecificDataUtil {
private static final byte[] NAL_START_CODE = new byte[] {0, 0, 0, 1};
private static final int AUDIO_SPECIFIC_CONFIG_FREQUENCY_INDEX_ARBITRARY = 0xF;
public static final int[] AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE = new int[] {
96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350
};
private static final int AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID = -1;
/**
* In the channel configurations below, <A> indicates a single channel element; (A, B) indicates a
* channel pair element; and [A] indicates a low-frequency effects element.
* The speaker mapping short forms used are:
* - FC: front center
* - BC: back center
* - FL/FR: front left/right
* - FCL/FCR: front center left/right
* - FTL/FTR: front top left/right
* - SL/SR: back surround left/right
* - BL/BR: back left/right
* - LFE: low frequency effects
*/
private static final int[] AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE =
new int[] {
0,
1, /* mono: <FC> */
2, /* stereo: (FL, FR) */
3, /* 3.0: <FC>, (FL, FR) */
4, /* 4.0: <FC>, (FL, FR), <BC> */
5, /* 5.0 back: <FC>, (FL, FR), (SL, SR) */
6, /* 5.1 back: <FC>, (FL, FR), (SL, SR), <BC>, [LFE] */
8, /* 7.1 wide back: <FC>, (FCL, FCR), (FL, FR), (SL, SR), [LFE] */
AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID,
AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID,
AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID,
7, /* 6.1: <FC>, (FL, FR), (SL, SR), <RC>, [LFE] */
8, /* 7.1: <FC>, (FL, FR), (SL, SR), (BL, BR), [LFE] */
AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID,
8, /* 7.1 top: <FC>, (FL, FR), (SL, SR), [LFE], (FTL, FTR) */
AUDIO_SPECIFIC_CONFIG_CHANNEL_CONFIGURATION_INVALID
};
// Advanced Audio Coding Low-Complexity profile.
private static final int AUDIO_OBJECT_TYPE_AAC_LC = 2;
// Spectral Band Replication.
private static final int AUDIO_OBJECT_TYPE_SBR = 5;
// Error Resilient Bit-Sliced Arithmetic Coding.
private static final int AUDIO_OBJECT_TYPE_ER_BSAC = 22;
// Parametric Stereo.
private static final int AUDIO_OBJECT_TYPE_PS = 29;
private CodecSpecificDataUtil() {}
/**
* Parses an AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1
*
* @param audioSpecificConfig The AudioSpecificConfig to parse.
* @return A pair consisting of the sample rate in Hz and the channel count.
*/
public static Pair<Integer, Integer> parseAacAudioSpecificConfig(byte[] audioSpecificConfig) {
ParsableBitArray bitArray = new ParsableBitArray(audioSpecificConfig);
int audioObjectType = bitArray.readBits(5);
int frequencyIndex = bitArray.readBits(4);
int sampleRate;
if (frequencyIndex == AUDIO_SPECIFIC_CONFIG_FREQUENCY_INDEX_ARBITRARY) {
sampleRate = bitArray.readBits(24);
} else {
sampleRate = AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE[frequencyIndex];
}
int channelConfiguration = bitArray.readBits(4);
if (audioObjectType == AUDIO_OBJECT_TYPE_SBR || audioObjectType == AUDIO_OBJECT_TYPE_PS) {
// For an AAC bitstream using spectral band replication (SBR) or parametric stereo (PS) with
// explicit signaling, we return the extension sampling frequency as the sample rate of the
// content; this is identical to the sample rate of the decoded output but may differ from
// the sample rate set above.
// Use the extensionSamplingFrequencyIndex.
frequencyIndex = bitArray.readBits(4);
if (frequencyIndex == AUDIO_SPECIFIC_CONFIG_FREQUENCY_INDEX_ARBITRARY) {
sampleRate = bitArray.readBits(24);
} else {
sampleRate = AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE[frequencyIndex];
}
audioObjectType = bitArray.readBits(5);
if (audioObjectType == AUDIO_OBJECT_TYPE_ER_BSAC) {
// Use the extensionChannelConfiguration.
channelConfiguration = bitArray.readBits(4);
}
}
int channelCount = AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE[channelConfiguration];
return Pair.create(sampleRate, channelCount);
}
/**
* Builds a simple HE-AAC LC AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1
*
* @param sampleRate The sample rate in Hz.
* @param numChannels The number of channels.
* @return The AudioSpecificConfig.
*/
public static byte[] buildAacLcAudioSpecificConfig(int sampleRate, int numChannels) {
int sampleRateIndex = C.INDEX_UNSET;
for (int i = 0; i < AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE.length; ++i) {
if (sampleRate == AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE[i]) {
sampleRateIndex = i;
}
}
int channelConfig = C.INDEX_UNSET;
for (int i = 0; i < AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE.length; ++i) {
if (numChannels == AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE[i]) {
channelConfig = i;
}
}
if (sampleRateIndex == C.INDEX_UNSET || channelConfig == C.INDEX_UNSET) {
throw new IllegalArgumentException("Invalid sample rate or number of channels: "
+ sampleRate + ", " + numChannels);
}
return buildAacAudioSpecificConfig(AUDIO_OBJECT_TYPE_AAC_LC, sampleRateIndex, channelConfig);
}
/**
* Builds a simple AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1
*
* @param audioObjectType The audio object type.
* @param sampleRateIndex The sample rate index.
* @param channelConfig The channel configuration.
* @return The AudioSpecificConfig.
*/
public static byte[] buildAacAudioSpecificConfig(int audioObjectType, int sampleRateIndex,
int channelConfig) {
byte[] specificConfig = new byte[2];
specificConfig[0] = (byte) (((audioObjectType << 3) & 0xF8) | ((sampleRateIndex >> 1) & 0x07));
specificConfig[1] = (byte) (((sampleRateIndex << 7) & 0x80) | ((channelConfig << 3) & 0x78));
return specificConfig;
}
/**
* Constructs a NAL unit consisting of the NAL start code followed by the specified data.
*
* @param data An array containing the data that should follow the NAL start code.
* @param offset The start offset into {@code data}.
* @param length The number of bytes to copy from {@code data}
* @return The constructed NAL unit.
*/
public static byte[] buildNalUnit(byte[] data, int offset, int length) {
byte[] nalUnit = new byte[length + NAL_START_CODE.length];
System.arraycopy(NAL_START_CODE, 0, nalUnit, 0, NAL_START_CODE.length);
System.arraycopy(data, offset, nalUnit, NAL_START_CODE.length, length);
return nalUnit;
}
/**
* Splits an array of NAL units.
* <p>
* If the input consists of NAL start code delimited units, then the returned array consists of
* the split NAL units, each of which is still prefixed with the NAL start code. For any other
* input, null is returned.
*
* @param data An array of data.
* @return The individual NAL units, or null if the input did not consist of NAL start code
* delimited units.
*/
public static byte[][] splitNalUnits(byte[] data) {
if (!isNalStartCode(data, 0)) {
// data does not consist of NAL start code delimited units.
return null;
}
List<Integer> starts = new ArrayList<>();
int nalUnitIndex = 0;
do {
starts.add(nalUnitIndex);
nalUnitIndex = findNalStartCode(data, nalUnitIndex + NAL_START_CODE.length);
} while (nalUnitIndex != C.INDEX_UNSET);
byte[][] split = new byte[starts.size()][];
for (int i = 0; i < starts.size(); i++) {
int startIndex = starts.get(i);
int endIndex = i < starts.size() - 1 ? starts.get(i + 1) : data.length;
byte[] nal = new byte[endIndex - startIndex];
System.arraycopy(data, startIndex, nal, 0, nal.length);
split[i] = nal;
}
return split;
}
/**
* Finds the next occurrence of the NAL start code from a given index.
*
* @param data The data in which to search.
* @param index The first index to test.
* @return The index of the first byte of the found start code, or {@link C#INDEX_UNSET}.
*/
private static int findNalStartCode(byte[] data, int index) {
int endIndex = data.length - NAL_START_CODE.length;
for (int i = index; i <= endIndex; i++) {
if (isNalStartCode(data, i)) {
return i;
}
}
return C.INDEX_UNSET;
}
/**
* Tests whether there exists a NAL start code at a given index.
*
* @param data The data.
* @param index The index to test.
* @return Whether there exists a start code that begins at {@code index}.
*/
private static boolean isNalStartCode(byte[] data, int index) {
if (data.length - index <= NAL_START_CODE.length) {
return false;
}
for (int j = 0; j < NAL_START_CODE.length; j++) {
if (data[index + j] != NAL_START_CODE[j]) {
return false;
}
}
return true;
}
/**
* Returns a color format that is supported by the codec and by this test code. If no
* match is found, this throws a test failure -- the set of formats known to the test
* should be expanded for new platforms.
*/
public static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
for (int i = 0; i < capabilities.colorFormats.length; i++) {
int colorFormat = capabilities.colorFormats[i];
if (isRecognizedFormat(colorFormat)) {
return colorFormat;
}
}
return 0; // not reached
}
/**
* Returns true if this is a color format that this test code understands (i.e. we know how
* to read and generate frames in this format).
*/
private static boolean isRecognizedFormat(int colorFormat) {
switch (colorFormat) {
// these are the formats we know how to handle for this test
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
return true;
default:
return false;
}
}
}
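A small worked example of the AudioSpecificConfig helpers above: for 44100 Hz stereo AAC-LC (object type 2, frequency index 4, channel configuration 2), the two config bytes come out as 0x12 0x10, and parsing them recovers the original values.

// Round-trip a 2-byte AAC AudioSpecificConfig for 44100 Hz stereo.
fun ascRoundTrip() {
    val csd = CodecSpecificDataUtil.buildAacLcAudioSpecificConfig(44100, 2)
    // csd[0] = (2 shl 3) or (4 shr 1) = 0x12; csd[1] = (2 shl 3) = 0x10
    val parsed = CodecSpecificDataUtil.parseAacAudioSpecificConfig(csd)
    check(parsed.first == 44100 && parsed.second == 2)
}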

@ -0,0 +1,184 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.easydarwin.util;
/**
* Wraps a byte array, providing methods that allow it to be read as a bitstream.
*/
public final class ParsableBitArray {
public byte[] data;
// The offset within the data, stored as the current byte offset, and the bit offset within that
// byte (from 0 to 7).
private int byteOffset;
private int bitOffset;
private int byteLimit;
/**
* Creates a new instance that initially has no backing data.
*/
public ParsableBitArray() {}
/**
* Creates a new instance that wraps an existing array.
*
* @param data The data to wrap.
*/
public ParsableBitArray(byte[] data) {
this(data, data.length);
}
/**
* Creates a new instance that wraps an existing array.
*
* @param data The data to wrap.
* @param limit The limit in bytes.
*/
public ParsableBitArray(byte[] data, int limit) {
this.data = data;
byteLimit = limit;
}
/**
* Updates the instance to wrap {@code data}, and resets the position to zero.
*
* @param data The array to wrap.
*/
public void reset(byte[] data) {
reset(data, data.length);
}
/**
* Updates the instance to wrap {@code data}, and resets the position to zero.
*
* @param data The array to wrap.
* @param limit The limit in bytes.
*/
public void reset(byte[] data, int limit) {
this.data = data;
byteOffset = 0;
bitOffset = 0;
byteLimit = limit;
}
/**
* Returns the number of bits yet to be read.
*/
public int bitsLeft() {
return (byteLimit - byteOffset) * 8 - bitOffset;
}
/**
* Returns the current bit offset.
*/
public int getPosition() {
return byteOffset * 8 + bitOffset;
}
/**
* Sets the current bit offset.
*
* @param position The position to set.
*/
public void setPosition(int position) {
byteOffset = position / 8;
bitOffset = position - (byteOffset * 8);
assertValidOffset();
}
/**
* Skips bits and moves current reading position forward.
*
* @param n The number of bits to skip.
*/
public void skipBits(int n) {
byteOffset += (n / 8);
bitOffset += (n % 8);
if (bitOffset > 7) {
byteOffset++;
bitOffset -= 8;
}
assertValidOffset();
}
/**
* Reads a single bit.
*
* @return Whether the bit is set.
*/
public boolean readBit() {
return readBits(1) == 1;
}
/**
* Reads up to 32 bits.
*
* @param numBits The number of bits to read.
* @return An integer whose bottom n bits hold the read data.
*/
public int readBits(int numBits) {
if (numBits == 0) {
return 0;
}
int returnValue = 0;
// Read as many whole bytes as we can.
int wholeBytes = (numBits / 8);
for (int i = 0; i < wholeBytes; i++) {
int byteValue;
if (bitOffset != 0) {
byteValue = ((data[byteOffset] & 0xFF) << bitOffset)
| ((data[byteOffset + 1] & 0xFF) >>> (8 - bitOffset));
} else {
byteValue = data[byteOffset];
}
numBits -= 8;
returnValue |= (byteValue & 0xFF) << numBits;
byteOffset++;
}
// Read any remaining bits.
if (numBits > 0) {
int nextBit = bitOffset + numBits;
byte writeMask = (byte) (0xFF >> (8 - numBits));
if (nextBit > 8) {
// Combine bits from current byte and next byte.
returnValue |= ((((data[byteOffset] & 0xFF) << (nextBit - 8)
| ((data[byteOffset + 1] & 0xFF) >> (16 - nextBit))) & writeMask));
byteOffset++;
} else {
// Bits to be read only within current byte.
returnValue |= (((data[byteOffset] & 0xFF) >> (8 - nextBit)) & writeMask);
if (nextBit == 8) {
byteOffset++;
}
}
bitOffset = nextBit % 8;
}
assertValidOffset();
return returnValue;
}
private void assertValidOffset() {
// It is fine for position to be at the end of the array, but no further.
}
}
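// Minimal sketch of reading bit-packed fields with ParsableBitArray. The 2-byte layout below
// (4-bit version, 1-bit flag, 11-bit length) is invented purely for illustration.
import org.easydarwin.util.ParsableBitArray;

final class BitArrayExample {
    public static void main(String[] args) {
        byte[] header = {(byte) 0x2C, (byte) 0x05}; // 0010 1100 0000 0101
        ParsableBitArray bits = new ParsableBitArray(header);
        int version = bits.readBits(4);   // 0b0010 = 2
        boolean flag = bits.readBit();    // next bit is 1 -> true
        int length = bits.readBits(11);   // remaining 11 bits = 1029
        System.out.println(version + " " + flag + " " + length + " left=" + bits.bitsLeft());
    }
}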

@ -0,0 +1,514 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.easydarwin.util;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
/**
* Wraps a byte array, providing a set of methods for parsing data from it. Numerical values are
* parsed with the assumption that their constituent bytes are in big endian order.
*/
public final class ParsableByteArray {
public byte[] data;
private int position;
private int limit;
/**
* Creates a new instance that initially has no backing data.
*/
public ParsableByteArray() {}
/**
* Creates a new instance with {@code limit} bytes and sets the limit.
*
* @param limit The limit to set.
*/
public ParsableByteArray(int limit) {
this.data = new byte[limit];
this.limit = limit;
}
/**
* Creates a new instance wrapping {@code data}, and sets the limit to {@code data.length}.
*
* @param data The array to wrap.
*/
public ParsableByteArray(byte[] data) {
this.data = data;
limit = data.length;
}
/**
* Creates a new instance that wraps an existing array.
*
* @param data The data to wrap.
* @param limit The limit to set.
*/
public ParsableByteArray(byte[] data, int limit) {
this.data = data;
this.limit = limit;
}
/**
* Resets the position to zero and the limit to the specified value. If the limit exceeds the
* capacity, {@code data} is replaced with a new array of sufficient size.
*
* @param limit The limit to set.
*/
public void reset(int limit) {
reset(capacity() < limit ? new byte[limit] : data, limit);
}
/**
* Updates the instance to wrap {@code data}, and resets the position to zero.
*
* @param data The array to wrap.
* @param limit The limit to set.
*/
public void reset(byte[] data, int limit) {
this.data = data;
this.limit = limit;
position = 0;
}
/**
* Sets the position and limit to zero.
*/
public void reset() {
position = 0;
limit = 0;
}
/**
* Returns the number of bytes yet to be read.
*/
public int bytesLeft() {
return limit - position;
}
/**
* Returns the limit.
*/
public int limit() {
return limit;
}
/**
* Sets the limit.
*
* @param limit The limit to set.
*/
public void setLimit(int limit) {
// Assertions.checkArgument(limit >= 0 && limit <= data.length);
this.limit = limit;
}
/**
* Returns the current offset in the array, in bytes.
*/
public int getPosition() {
return position;
}
/**
* Returns the capacity of the array, which may be larger than the limit.
*/
public int capacity() {
return data == null ? 0 : data.length;
}
/**
* Sets the reading offset in the array.
*
* @param position Byte offset in the array from which to read.
* @throws IllegalArgumentException Thrown if the new position is neither in nor at the end of the
* array.
*/
public void setPosition(int position) {
// It is fine for position to be at the end of the array.
// Assertions.checkArgument(position >= 0 && position <= limit);
this.position = position;
}
/**
* Moves the reading offset by {@code bytes}.
*
* @param bytes The number of bytes to skip.
* @throws IllegalArgumentException Thrown if the new position is neither in nor at the end of the
* array.
*/
public void skipBytes(int bytes) {
setPosition(position + bytes);
}
/**
* Reads the next {@code length} bytes into {@code bitArray}, and resets the position of
* {@code bitArray} to zero.
*
* @param bitArray The {@link ParsableBitArray} into which the bytes should be read.
* @param length The number of bytes to write.
*/
public void readBytes(ParsableBitArray bitArray, int length) {
readBytes(bitArray.data, 0, length);
bitArray.setPosition(0);
}
/**
* Reads the next {@code length} bytes into {@code buffer} at {@code offset}.
*
* @see System#arraycopy(Object, int, Object, int, int)
* @param buffer The array into which the read data should be written.
* @param offset The offset in {@code buffer} at which the read data should be written.
* @param length The number of bytes to read.
*/
public void readBytes(byte[] buffer, int offset, int length) {
System.arraycopy(data, position, buffer, offset, length);
position += length;
}
/**
* Reads the next {@code length} bytes into {@code buffer}.
*
* @see ByteBuffer#put(byte[], int, int)
* @param buffer The {@link ByteBuffer} into which the read data should be written.
* @param length The number of bytes to read.
*/
public void readBytes(ByteBuffer buffer, int length) {
buffer.put(data, position, length);
position += length;
}
/**
* Peeks at the next byte as an unsigned value.
*/
public int peekUnsignedByte() {
return (data[position] & 0xFF);
}
/**
* Reads the next byte as an unsigned value.
*/
public int readUnsignedByte() {
return (data[position++] & 0xFF);
}
/**
* Reads the next two bytes as an unsigned value.
*/
public int readUnsignedShort() {
return (data[position++] & 0xFF) << 8
| (data[position++] & 0xFF);
}
/**
* Reads the next two bytes as an unsigned value in little endian order.
*/
public int readLittleEndianUnsignedShort() {
return (data[position++] & 0xFF) | (data[position++] & 0xFF) << 8;
}
/**
* Reads the next two bytes as a signed value.
*/
public short readShort() {
return (short) ((data[position++] & 0xFF) << 8
| (data[position++] & 0xFF));
}
/**
* Reads the next two bytes as a signed value in little endian order.
*/
public short readLittleEndianShort() {
return (short) ((data[position++] & 0xFF) | (data[position++] & 0xFF) << 8);
}
/**
* Reads the next three bytes as an unsigned value.
*/
public int readUnsignedInt24() {
return (data[position++] & 0xFF) << 16
| (data[position++] & 0xFF) << 8
| (data[position++] & 0xFF);
}
/**
* Reads the next three bytes as a signed value in little endian order.
*/
public int readLittleEndianInt24() {
return (data[position++] & 0xFF)
| (data[position++] & 0xFF) << 8
| (data[position++] & 0xFF) << 16;
}
/**
* Reads the next three bytes as an unsigned value in little endian order.
*/
public int readLittleEndianUnsignedInt24() {
return (data[position++] & 0xFF)
| (data[position++] & 0xFF) << 8
| (data[position++] & 0xFF) << 16;
}
/**
* Reads the next four bytes as an unsigned value.
*/
public long readUnsignedInt() {
return (data[position++] & 0xFFL) << 24
| (data[position++] & 0xFFL) << 16
| (data[position++] & 0xFFL) << 8
| (data[position++] & 0xFFL);
}
/**
* Reads the next four bytes as an unsigned value in little endian order.
*/
public long readLittleEndianUnsignedInt() {
return (data[position++] & 0xFFL)
| (data[position++] & 0xFFL) << 8
| (data[position++] & 0xFFL) << 16
| (data[position++] & 0xFFL) << 24;
}
/**
* Reads the next four bytes as a signed value.
*/
public int readInt() {
return (data[position++] & 0xFF) << 24
| (data[position++] & 0xFF) << 16
| (data[position++] & 0xFF) << 8
| (data[position++] & 0xFF);
}
/**
* Reads the next four bytes as a signed value in little endian order.
*/
public int readLittleEndianInt() {
return (data[position++] & 0xFF)
| (data[position++] & 0xFF) << 8
| (data[position++] & 0xFF) << 16
| (data[position++] & 0xFF) << 24;
}
/**
* Reads the next eight bytes as a signed value.
*/
public long readLong() {
return (data[position++] & 0xFFL) << 56
| (data[position++] & 0xFFL) << 48
| (data[position++] & 0xFFL) << 40
| (data[position++] & 0xFFL) << 32
| (data[position++] & 0xFFL) << 24
| (data[position++] & 0xFFL) << 16
| (data[position++] & 0xFFL) << 8
| (data[position++] & 0xFFL);
}
/**
* Reads the next eight bytes as a signed value in little endian order.
*/
public long readLittleEndianLong() {
return (data[position++] & 0xFFL)
| (data[position++] & 0xFFL) << 8
| (data[position++] & 0xFFL) << 16
| (data[position++] & 0xFFL) << 24
| (data[position++] & 0xFFL) << 32
| (data[position++] & 0xFFL) << 40
| (data[position++] & 0xFFL) << 48
| (data[position++] & 0xFFL) << 56;
}
/**
* Reads the next four bytes, returning the integer portion of the fixed point 16.16 integer.
*/
public int readUnsignedFixedPoint1616() {
int result = (data[position++] & 0xFF) << 8
| (data[position++] & 0xFF);
position += 2; // Skip the non-integer portion.
return result;
}
/**
* Reads a Synchsafe integer.
* <p>
* Synchsafe integers keep the highest bit of every byte zeroed. A 32 bit synchsafe integer can
* store 28 bits of information.
*
* @return The parsed value.
*/
public int readSynchSafeInt() {
int b1 = readUnsignedByte();
int b2 = readUnsignedByte();
int b3 = readUnsignedByte();
int b4 = readUnsignedByte();
return (b1 << 21) | (b2 << 14) | (b3 << 7) | b4;
}
/**
* Reads the next four bytes as an unsigned integer into an integer, if the top bit is a zero.
*
* @throws IllegalStateException Thrown if the top bit of the input data is set.
*/
public int readUnsignedIntToInt() {
int result = readInt();
if (result < 0) {
throw new IllegalStateException("Top bit not zero: " + result);
}
return result;
}
/**
* Reads the next four bytes as a little endian unsigned integer into an integer, if the top bit
* is a zero.
*
* @throws IllegalStateException Thrown if the top bit of the input data is set.
*/
public int readLittleEndianUnsignedIntToInt() {
int result = readLittleEndianInt();
if (result < 0) {
throw new IllegalStateException("Top bit not zero: " + result);
}
return result;
}
/**
* Reads the next eight bytes as an unsigned long into a long, if the top bit is a zero.
*
* @throws IllegalStateException Thrown if the top bit of the input data is set.
*/
public long readUnsignedLongToLong() {
long result = readLong();
if (result < 0) {
throw new IllegalStateException("Top bit not zero: " + result);
}
return result;
}
/**
* Reads the next four bytes as a 32-bit floating point value.
*/
public float readFloat() {
return Float.intBitsToFloat(readInt());
}
/**
* Reads the next eight bytes as a 64-bit floating point value.
*/
public double readDouble() {
return Double.longBitsToDouble(readLong());
}
/**
* Reads the next {@code length} bytes as UTF-8 characters.
*
* @param length The number of bytes to read.
* @return The string encoded by the bytes.
*/
public String readString(int length) {
return readString(length, Charset.forName("UTF-8"));
}
/**
* Reads the next {@code length} bytes as characters in the specified {@link Charset}.
*
* @param length The number of bytes to read.
* @param charset The character set of the encoded characters.
* @return The string encoded by the bytes in the specified character set.
*/
public String readString(int length, Charset charset) {
String result = new String(data, position, length, charset);
position += length;
return result;
}
/**
* Reads a line of text.
* <p>
* A line is considered to be terminated by any one of a carriage return ('\r'), a line feed
* ('\n'), or a carriage return followed immediately by a line feed ('\r\n'). The system's default
* charset (UTF-8) is used.
*
* @return A String containing the contents of the line, not including any line-termination
* characters, or null if the end of the stream has been reached.
*/
public String readLine() {
if (bytesLeft() == 0) {
return null;
}
int lineLimit = position;
while (lineLimit < limit && data[lineLimit] != '\n' && data[lineLimit] != '\r') {
lineLimit++;
}
if (lineLimit - position >= 3 && data[position] == (byte) 0xEF
&& data[position + 1] == (byte) 0xBB && data[position + 2] == (byte) 0xBF) {
// There's a byte order mark at the start of the line. Discard it.
position += 3;
}
String line = new String(data, position, lineLimit - position);
position = lineLimit;
if (position == limit) {
return line;
}
if (data[position] == '\r') {
position++;
if (position == limit) {
return line;
}
}
if (data[position] == '\n') {
position++;
}
return line;
}
/**
* Reads a long value stored as a UTF-8 style variable-length encoding.
*
* @return The decoded long value.
* @throws NumberFormatException If the bytes do not form a valid variable-length encoding.
*/
public long readUtf8EncodedLong() {
int length = 0;
long value = data[position];
// find the high most 0 bit
for (int j = 7; j >= 0; j--) {
if ((value & (1 << j)) == 0) {
if (j < 6) {
value &= (1 << j) - 1;
length = 7 - j;
} else if (j == 7) {
length = 1;
}
break;
}
}
if (length == 0) {
throw new NumberFormatException("Invalid UTF-8 sequence first byte: " + value);
}
for (int i = 1; i < length; i++) {
int x = data[position + i];
if ((x & 0xC0) != 0x80) { // if the high most 0 bit not 7th
throw new NumberFormatException("Invalid UTF-8 sequence continuation byte: " + value);
}
value = (value << 6) | (x & 0x3F);
}
position += length;
return value;
}
}
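// Minimal sketch of parsing a big-endian header with ParsableByteArray. The field layout
// (magic, payload length, 64-bit timestamp, UTF-8 name) is invented for illustration only.
import org.easydarwin.util.ParsableByteArray;

final class ByteArrayExample {
    static void parseHeader(byte[] packet) {
        ParsableByteArray in = new ParsableByteArray(packet);
        int magic = in.readUnsignedShort();        // 2 bytes, big endian
        long payloadLength = in.readUnsignedInt(); // 4 bytes, big endian
        long timestampUs = in.readLong();          // 8 bytes, big endian
        int nameLength = in.readUnsignedByte();    // 1 byte
        String name = in.readString(nameLength);   // nameLength bytes as UTF-8
        System.out.println(magic + " " + payloadLength + " " + timestampUs + " " + name);
        // Whatever follows the header is still available via in.bytesLeft() / in.readBytes(...).
    }
}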

@ -0,0 +1,60 @@
package org.easydarwin.util;
import android.graphics.SurfaceTexture;
import android.view.TextureView;
import androidx.annotation.NonNull;
import androidx.lifecycle.Lifecycle;
import androidx.lifecycle.LifecycleOwner;
import androidx.lifecycle.LifecycleRegistry;
import java.lang.ref.WeakReference;
public class TextureLifecycler implements LifecycleOwner {
private LifecycleRegistry mLifecycleRegistry = new LifecycleRegistry(this);
@NonNull
@Override
public Lifecycle getLifecycle() {
return mLifecycleRegistry;
}
WeakReference<TextureView> mRef;
public TextureLifecycler(TextureView view) {
mRef = new WeakReference<>(view);
mLifecycleRegistry.markState(Lifecycle.State.INITIALIZED);
if (view.isAvailable()) {
mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_CREATE);
mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_START);
mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_RESUME);
}
view.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_CREATE);
mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_START);
mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_RESUME);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_PAUSE);
mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_STOP);
mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_DESTROY);
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
});
}
}
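// Minimal sketch: using TextureLifecycler to scope a LiveData observer to the lifetime of a
// TextureView's SurfaceTexture. The LiveData source below is an assumption for illustration.
import android.view.TextureView;
import androidx.lifecycle.MutableLiveData;
import org.easydarwin.util.TextureLifecycler;

final class TextureLifecyclerExample {
    static void bind(TextureView textureView, MutableLiveData<String> status) {
        TextureLifecycler lifecycler = new TextureLifecycler(textureView);
        // The observer is active only while the surface exists; it is removed automatically
        // once onSurfaceTextureDestroyed() drives the registry to DESTROYED.
        status.observe(lifecycler, value -> {
            // react to status updates while the surface is available
        });
    }
}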

@ -0,0 +1,407 @@
package org.easydarwin.video;
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.util.SparseArray;
import androidx.core.content.ContextCompat;
import java.io.Closeable;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.HashSet;
import java.util.Set;
/**
* Created by John on 2016/3/12.
*/
public class Client implements Closeable {
private static int sKey;
private static Context mContext;
private volatile int paused = 0;
private static final Handler h = new Handler(Looper.getMainLooper());
private static Set<Integer> _channelPause = new HashSet<>();
private final Runnable closeTask = new Runnable() {
@Override
public void run() {
if (paused > 0) {
Log.i(TAG, "realPause! close stream");
closeStream();
paused = 2;
}
}
};
private int _channel;
private String _url;
private int _type;
private int _mediaType;
private String _user;
private String _pwd;
private int _sendOption;
public static final class FrameInfo {
public int codec; /* audio/video codec id */
public int type; /* video frame type */
public byte fps; /* video frame rate */
public short width; /* video width */
public short height; /* video height */
public int reserved1; /* reserved field 1 */
public int reserved2; /* reserved field 2 */
public int sample_rate; /* audio sample rate */
public int channels; /* audio channel count */
public int bits_per_sample; /* audio bits per sample */
public int length; /* frame size in bytes */
public long timestamp_usec; /* timestamp, microsecond part */
public long timestamp_sec; /* timestamp, second part */
public long stamp;
public float bitrate; /* bitrate */
public float losspacket; /* packet loss rate */
public byte[] buffer;
public int offset = 0;
public boolean audio;
}
public static final class MediaInfo {
// Easy_U32 u32VideoCodec; /* video codec type */
// Easy_U32 u32VideoFps; /* video frame rate */
//
// Easy_U32 u32AudioCodec; /* audio codec type */
// Easy_U32 u32AudioSamplerate; /* audio sample rate */
// Easy_U32 u32AudioChannel; /* audio channel count */
// Easy_U32 u32AudioBitsPerSample; /* audio bits per sample */
//
// Easy_U32 u32H264SpsLength; /* video SPS length */
// Easy_U32 u32H264PpsLength; /* video PPS length */
// Easy_U8 u8H264Sps[128]; /* video SPS data */
// Easy_U8 u8H264Pps[36]; /* video PPS data */
int videoCodec;
int fps;
int audioCodec;
int sample;
int channel;
int bitPerSample;
int spsLen;
int ppsLen;
byte[] sps;
byte[] pps;
@Override
public String toString() {
return "MediaInfo{" +
"videoCodec=" + videoCodec +
", fps=" + fps +
", audioCodec=" + audioCodec +
", sample=" + sample +
", channel=" + channel +
", bitPerSample=" + bitPerSample +
", spsLen=" + spsLen +
", ppsLen=" + ppsLen +
'}';
}
}
public interface SourceCallBack {
void onSourceCallBack(int _channelId, int _channelPtr, int _frameType, FrameInfo frameInfo);
void onMediaInfoCallBack(int _channelId, MediaInfo mi);
void onEvent(int _channelId, int err, int info);
}
public static final int EASY_SDK_VIDEO_FRAME_FLAG = 0x01;
public static final int EASY_SDK_AUDIO_FRAME_FLAG = 0x02;
public static final int EASY_SDK_EVENT_FRAME_FLAG = 0x04;
public static final int EASY_SDK_RTP_FRAME_FLAG = 0x08; /* RTP frame flag */
public static final int EASY_SDK_SDP_FRAME_FLAG = 0x10; /* SDP frame flag */
public static final int EASY_SDK_MEDIA_INFO_FLAG = 0x20; /* media info flag */
public static final int EASY_SDK_EVENT_CODEC_ERROR = 0x63657272; /* ERROR */
public static final int EASY_SDK_EVENT_CODEC_EXIT = 0x65786974; /* EXIT */
public static final int TRANSTYPE_TCP = 1;
public static final int TRANSTYPE_UDP = 2;
private static final String TAG = Client.class.getSimpleName();
static {
System.loadLibrary("EasyRTSPClient");
}
private long mCtx;
private static final SparseArray<SourceCallBack> sCallbacks = new SparseArray<>();
Client(Context context) {
if (context == null) {
throw new NullPointerException();
}
mCtx = init(context, "");
mContext = context.getApplicationContext();
}
int registerCallback(SourceCallBack cb) {
synchronized (sCallbacks) {
sCallbacks.put(++sKey, cb);
return sKey;
}
}
void unrigisterCallback(SourceCallBack cb) {
synchronized (sCallbacks) {
int idx = sCallbacks.indexOfValue(cb);
if (idx != -1) {
sCallbacks.removeAt(idx);
}
}
}
public int getLastErrorCode() {
return getErrorCode(mCtx);
}
public int openStream(int channel, String url, int type, int sendOption, int mediaType, String user, String pwd) {
_channel = channel;
_url = url;
_type = type;
_mediaType = mediaType;
_user = user;
_pwd = pwd;
_sendOption = sendOption;
return openStream();
}
public void closeStream() {
h.removeCallbacks(closeTask);
if (mCtx != 0) {
closeStream(mCtx);
}
}
private static native int getErrorCode(long context);
private native long init(Context context, String key);
private native int deInit(long context);
private int openStream() {
if (null == _url) {
throw new NullPointerException();
}
if (mCtx == 0) {
throw new IllegalStateException("初始化失败KEY不合法");
}
return openStream(mCtx, _channel, _url, _type, _mediaType, _user, _pwd, 1000, 0, _sendOption);
}
private native int openStream(long context, int channel, String url, int type, int mediaType, String user, String pwd, int reconn, int outRtpPacket, int rtspOption);
// private native int startRecord(int context, String path);
//
// private native void stopRecord(int context);
private native void closeStream(long context);
private static void save2path(byte[] buffer, int offset, int length, String path, boolean append) {
FileOutputStream fos = null;
try {
fos = new FileOutputStream(path, append);
fos.write(buffer, offset, length);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (fos != null) {
try {
fos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
private static void onSourceCallBack(int _channelId, int _channelPtr, int _frameType, byte[] pBuf, byte[] frameBuffer) {
if (BuildConfig.MEDIA_DEBUG) {
int permissionCheck = ContextCompat.checkSelfPermission(mContext, Manifest.permission.WRITE_EXTERNAL_STORAGE);
if (permissionCheck == PackageManager.PERMISSION_GRANTED) {
// frameType + size + buffer
if (_frameType != 0) {
ByteBuffer bf = ByteBuffer.allocate(5);
bf.put((byte) _frameType);
if (_frameType == EASY_SDK_MEDIA_INFO_FLAG) {
bf.putInt(pBuf.length);
save2path(bf.array(), 0, 5, "/sdcard/media_debug.data", true);
save2path(pBuf, 0, pBuf.length, "/sdcard/media_debug.data", true);
} else {
bf.putInt(frameBuffer.length);
save2path(bf.array(), 0, 5, "/sdcard/media_debug.data", true);
save2path(frameBuffer, 0, frameBuffer.length, "/sdcard/media_debug.data", true);
}
}
}
}
final SourceCallBack callBack;
synchronized (sCallbacks) {
callBack = sCallbacks.get(_channelId);
}
if (_frameType == 0) {
if (callBack != null) {
callBack.onSourceCallBack(_channelId, _channelPtr, _frameType, null);
}
return;
}
if (_frameType == EASY_SDK_MEDIA_INFO_FLAG) {
if (callBack != null) {
MediaInfo mi = new MediaInfo();
ByteBuffer buffer = ByteBuffer.wrap(pBuf);
buffer.order(ByteOrder.LITTLE_ENDIAN);
mi.videoCodec = buffer.getInt();
mi.fps = buffer.getInt();
mi.audioCodec = buffer.getInt();
mi.sample = buffer.getInt();
mi.channel = buffer.getInt();
mi.bitPerSample = buffer.getInt();
mi.spsLen = buffer.getInt();
mi.ppsLen = buffer.getInt();
mi.sps = new byte[128];
mi.pps = new byte[36];
buffer.get(mi.sps);
buffer.get(mi.pps);
// int videoCodec;int fps;
// int audioCodec;int sample;int channel;int bitPerSample;
// int spsLen;
// int ppsLen;
// byte[]sps;
// byte[]pps;
callBack.onMediaInfoCallBack(_channelId, mi);
}
return;
}
ByteBuffer buffer = ByteBuffer.wrap(frameBuffer);
buffer.order(ByteOrder.LITTLE_ENDIAN);
FrameInfo fi = new FrameInfo();
fi.codec = buffer.getInt();
fi.type = buffer.getInt();
fi.fps = buffer.get();
buffer.get();
fi.width = buffer.getShort();
fi.height = buffer.getShort();
buffer.getInt();
buffer.getInt();
buffer.getShort();
fi.sample_rate = buffer.getInt();
fi.channels = buffer.getInt();
fi.bits_per_sample = buffer.getInt();
fi.length = buffer.getInt();
fi.timestamp_usec = buffer.getInt();
fi.timestamp_sec = buffer.getInt();
// getInt() sign-extends, so restore the unsigned 32-bit value before combining.
long sec = fi.timestamp_sec < 0 ? 0x100000000L + fi.timestamp_sec : fi.timestamp_sec;
long usec = fi.timestamp_usec < 0 ? 0x100000000L + fi.timestamp_usec : fi.timestamp_usec;
fi.stamp = sec * 1000000 + usec;
// long differ = fi.stamp - mPreviewStamp;
// Log.d(TAG, String.format("%s:%d,%d,%d, %d", EASY_SDK_VIDEO_FRAME_FLAG == _frameType ? "视频" : "音频", fi.stamp, fi.timestamp_sec, fi.timestamp_usec, differ));
fi.buffer = pBuf;
boolean paused = false;
synchronized (_channelPause) {
paused = _channelPause.contains(_channelId);
}
if (callBack != null) {
if (paused) {
Log.i(TAG, "channel_" + _channelId + " is paused!");
}
callBack.onSourceCallBack(_channelId, _channelPtr, _frameType, fi);
}
}
private static void onEvent(int channel, int err, int state) {
// state: 1 connecting, 2 connection error, 3 connect thread exited
// err carries the status code of the underlying request: 200, 400, 401, etc.
Log.e(TAG, String.format("__RTSPClientCallBack onEvent: err=%d, state=%d", err, state));
synchronized (sCallbacks) {
final SourceCallBack callBack = sCallbacks.get(channel);
if (callBack != null) {
callBack.onEvent(channel, err, state);
}
}
}
public void pause() {
if (Looper.myLooper() != Looper.getMainLooper()) {
throw new IllegalThreadStateException("please call pause in Main thread!");
}
synchronized (_channelPause) {
_channelPause.add(_channel);
}
paused = 1;
Log.i(TAG, "pause:=" + 1);
h.postDelayed(closeTask, 10000);
}
public void resume() {
if (Looper.myLooper() != Looper.getMainLooper()) {
throw new IllegalThreadStateException("call resume in Main thread!");
}
synchronized (_channelPause) {
_channelPause.remove(_channel);
}
h.removeCallbacks(closeTask);
if (paused == 2) {
Log.i(TAG, "resume:=" + 0);
openStream();
}
Log.i(TAG, "resume:=" + 0);
paused = 0;
}
@Override
public void close() throws IOException {
h.removeCallbacks(closeTask);
_channelPause.remove(_channel);
if (mCtx == 0)
throw new IOException("not opened or already closed");
deInit(mCtx);
mCtx = 0;
}
}
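// Minimal sketch of driving Client directly. Its constructor and registerCallback() are
// package-private, so the sketch lives in org.easydarwin.video; the RTSP URL is a placeholder
// and real apps would normally go through a higher-level player wrapper instead.
package org.easydarwin.video;

import android.content.Context;
import android.util.Log;

final class ClientExample {
    static Client open(Context context) {
        Client client = new Client(context);
        int channel = client.registerCallback(new Client.SourceCallBack() {
            @Override
            public void onSourceCallBack(int channelId, int channelPtr, int frameType, Client.FrameInfo frameInfo) {
                if (frameInfo != null && !frameInfo.audio) {
                    Log.d("ClientExample", "video frame, " + frameInfo.length + " bytes @ " + frameInfo.stamp);
                }
            }

            @Override
            public void onMediaInfoCallBack(int channelId, Client.MediaInfo mi) {
                Log.d("ClientExample", "media info: " + mi);
            }

            @Override
            public void onEvent(int channelId, int err, int info) {
                Log.d("ClientExample", "event err=" + err + " state=" + info);
            }
        });
        // TCP transport, video + audio, no credentials; the URL below is a placeholder.
        client.openStream(channel, "rtsp://192.168.1.64/stream1", Client.TRANSTYPE_TCP, 0,
                Client.EASY_SDK_VIDEO_FRAME_FLAG | Client.EASY_SDK_AUDIO_FRAME_FLAG, null, null);
        return client;
    }
}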

@ -0,0 +1,196 @@
package org.easydarwin.video;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Build;
import android.os.SystemClock;
import android.text.TextUtils;
import android.util.Log;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.security.InvalidParameterException;
import static android.media.MediaCodec.BUFFER_FLAG_KEY_FRAME;
/**
* Created by John on 2017/1/10.
*/
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public class EasyMuxer {
public static final boolean VERBOSE = true;
private static final String TAG = EasyMuxer.class.getSimpleName();
private final String mFilePath;
private boolean hasAudio;
private MediaMuxer mMuxer;
private final long durationMillis;
private int index = 0;
private int mVideoTrackIndex = -1;
private int mAudioTrackIndex = -1;
private long mBeginMillis = 0L;
private MediaFormat mVideoFormat;
private MediaFormat mAudioFormat;
private long video_stample = 0;
private long audio_stample = 0;
public EasyMuxer(String path, boolean hasAudio, long durationMillis) {
if (TextUtils.isEmpty(path)){
throw new InvalidParameterException("path should not be empty!");
}
if (path.toLowerCase().endsWith(".mp4")){
path = path.substring(0, path.toLowerCase().lastIndexOf(".mp4"));
}
mFilePath = path;
this.hasAudio = hasAudio;
this.durationMillis = durationMillis;
Object mux = null;
try {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
mux = new MediaMuxer(path + "-" + index++ + ".mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
} catch (IOException e) {
e.printStackTrace();
} finally {
mMuxer = (MediaMuxer) mux;
}
}
public synchronized void addTrack(MediaFormat format, boolean isVideo) {
// now that we have the Magic Goodies, start the muxer
if (mAudioTrackIndex != -1 && mVideoTrackIndex != -1)
throw new RuntimeException("already add all tracks");
int track = mMuxer.addTrack(format);
if (VERBOSE)
Log.i(TAG, String.format("addTrack %s result %d", isVideo ? "video" : "audio", track));
if (isVideo) {
mVideoFormat = format;
mVideoTrackIndex = track;
if (mAudioTrackIndex != -1 || !hasAudio) {
if (VERBOSE)
Log.i(TAG, "both audio and video added,and muxer is started");
mMuxer.start();
}
} else {
mAudioFormat = format;
mAudioTrackIndex = track;
if (mVideoTrackIndex != -1) {
mMuxer.start();
}
}
}
public synchronized void pumpStream(ByteBuffer outputBuffer, MediaCodec.BufferInfo bufferInfo, boolean isVideo) {
if (mMuxer == null) Log.w(TAG,"muxer is null!");
if (mVideoTrackIndex == -1) {
Log.i(TAG, String.format("pumpStream [%s] but muxer is not start.ignore..", isVideo ? "video" : "audio"));
return;
}
if (mAudioTrackIndex == -1 && hasAudio) {
Log.i(TAG, String.format("pumpStream [%s] but muxer is not start.ignore..", isVideo ? "video" : "audio"));
return;
}
if (isVideo && mBeginMillis == 0L){ // the first video frame written must be a key frame
if ((bufferInfo.flags & BUFFER_FLAG_KEY_FRAME) == 0){
Log.i(TAG, String.format("pumpStream [%s] but first key frame not received yet, ignoring..", isVideo ? "video" : "audio"));
return;
}
}
if (!isVideo && mBeginMillis == 0L){
Log.i(TAG, String.format("pumpStream [%s] but video frame not GOTTEN.ignore..", isVideo ? "video" : "audio"));
return;
}
if (isVideo && mBeginMillis == 0L){
mBeginMillis = SystemClock.elapsedRealtime();
}
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
} else if (bufferInfo.size != 0) {
if (isVideo && mVideoTrackIndex == -1) {
throw new InvalidParameterException("muxer hasn't started");
}
// adjust the ByteBuffer values to match BufferInfo (not needed?)
outputBuffer.position(bufferInfo.offset);
outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if (VERBOSE)
Log.d(TAG, String.format("sent %s [" + bufferInfo.size + "] with timestamp:[%d] to muxer", isVideo ? "video" : "audio", bufferInfo.presentationTimeUs / 1000));
if (isVideo){
if (video_stample != 0){
if (bufferInfo.presentationTimeUs - video_stample <= 0){
Log.w(TAG,"video timestample goback, ignore!");
return;
}
video_stample = bufferInfo.presentationTimeUs;
}else{
video_stample = bufferInfo.presentationTimeUs;
}
}else {
if (audio_stample != 0){
if (bufferInfo.presentationTimeUs - audio_stample <= 0){
Log.w(TAG,"audio timestample goback, ignore!");
return;
}
audio_stample = bufferInfo.presentationTimeUs;
}else{
audio_stample = bufferInfo.presentationTimeUs;
}
}
mMuxer.writeSampleData(isVideo ? mVideoTrackIndex : mAudioTrackIndex, outputBuffer, bufferInfo);
}
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE)
Log.i(TAG, "BUFFER_FLAG_END_OF_STREAM received");
}
if (SystemClock.elapsedRealtime() - mBeginMillis >= durationMillis && isVideo && ((bufferInfo.flags & BUFFER_FLAG_KEY_FRAME) != 0)) {
if (VERBOSE)
Log.i(TAG, String.format("record file reach expiration.create new file:" + index));
try {
mMuxer.stop();
mMuxer.release();
}catch (Exception e){
e.printStackTrace();
}
mMuxer = null;
mVideoTrackIndex = mAudioTrackIndex = -1;
try {
mMuxer = new MediaMuxer(mFilePath + "-" + index++ + ".mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
addTrack(mVideoFormat, true);
if (mAudioFormat != null) {
addTrack(mAudioFormat, false);
}
mBeginMillis = 0L;
pumpStream(outputBuffer, bufferInfo, isVideo);
} catch (IOException e) {
e.printStackTrace();
}
}
}
public synchronized void release() {
if (mMuxer != null) {
if (mVideoTrackIndex != -1 && (mAudioTrackIndex != -1 || !hasAudio)) {
if (VERBOSE)
Log.i(TAG, String.format("muxer is started. now it will be stoped."));
try {
mMuxer.stop();
mMuxer.release();
} catch (IllegalStateException ex) {
ex.printStackTrace();
}
}
mMuxer = null;
}
mBeginMillis = 0L;
}
}
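// Minimal sketch of feeding EasyMuxer from a MediaCodec video encoder drain loop. The output
// path, segment duration and the assumption of a video-only recording (hasAudio = false) are
// illustrative; error handling and end-of-stream bookkeeping are simplified.
import android.media.MediaCodec;
import java.nio.ByteBuffer;
import org.easydarwin.video.EasyMuxer;

final class EasyMuxerExample {
    // Typical construction: new EasyMuxer("/sdcard/record/out", false, 5 * 60 * 1000);
    static void drain(MediaCodec videoEncoder, EasyMuxer muxer) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        while (true) {
            int index = videoEncoder.dequeueOutputBuffer(info, 10_000);
            if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // Register the track once the encoder reports its final format; with
                // hasAudio == false the muxer starts immediately.
                muxer.addTrack(videoEncoder.getOutputFormat(), true);
            } else if (index >= 0) {
                ByteBuffer encoded = videoEncoder.getOutputBuffer(index);
                muxer.pumpStream(encoded, info, true); // segments into -0.mp4, -1.mp4, ...
                videoEncoder.releaseOutputBuffer(index, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) break;
            } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                break; // nothing ready yet
            }
        }
    }
}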

@ -0,0 +1,29 @@
package org.easydarwin.video;
import java.lang.annotation.Native;
/**
* Created by John on 2017/1/10.
*/
public class EasyMuxer2 {
static {
System.loadLibrary("proffmpeg");
System.loadLibrary("VideoCodecer");
}
public static final int AVMEDIA_TYPE_VIDEO = 0;
public static final int AVMEDIA_TYPE_AUDIO = 1;
public static final int VIDEO_TYPE_H264 = 0;
public static final int VIDEO_TYPE_H265 = 1;
@Native
private long ctx;
public native int create(String path, int videoType, int width, int height, byte[] extra, int sample, int channel);
public native int writeFrame(int streamType, byte[] frame, int offset, int length, long timeStampMillis);
public native void close();
}
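// Minimal sketch of EasyMuxer2's native API: create a file, push pre-encoded H.264 frames,
// then close. The extradata (SPS/PPS), dimensions and path are placeholders, and the
// proffmpeg / VideoCodecer native libraries must be loadable for this to run.
import org.easydarwin.video.EasyMuxer2;

final class EasyMuxer2Example {
    static void mux(byte[][] h264Frames, byte[] spsPps, long frameIntervalMs) {
        EasyMuxer2 muxer = new EasyMuxer2();
        int ret = muxer.create("/sdcard/record/out.mp4", EasyMuxer2.VIDEO_TYPE_H264,
                1280, 720, spsPps, 8000, 1); // sample/channel only matter once audio is written
        if (ret != 0) return;
        long ts = 0;
        for (byte[] frame : h264Frames) {
            muxer.writeFrame(EasyMuxer2.AVMEDIA_TYPE_VIDEO, frame, 0, frame.length, ts);
            ts += frameIntervalMs;
        }
        muxer.close();
    }
}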

@ -0,0 +1,100 @@
package org.easydarwin.video;
import java.nio.ByteBuffer;
/**
* Created by John on 2017/1/5.
*/
public class VideoCodec {
static {
System.loadLibrary("proffmpeg");
System.loadLibrary("VideoCodecer");
}
public static final int DECODER_H264 = 0;
public static final int DECODER_H265 = 1;
private native long create(Object surface, int codec);
private native void close(long handle);
protected long mHandle;
private native int decode(long handle, byte[] in, int offset, int length, int[] size);
private native ByteBuffer decodeYUV(long handle, byte[] in, int offset, int length, int[] size);
private native void releaseYUV(ByteBuffer buffer);
private native void decodeYUV2(long handle, ByteBuffer buffer, int width, int height);
public int decoder_create(Object surface, int codec) {
mHandle = create(surface, codec);
if (mHandle != 0) {
return 0;
}
return -1;
}
public int decoder_decode(byte[] in, int offset, int length, int[] size) {
int result = decode(mHandle, in, offset, length, size);
return result;
}
public ByteBuffer decoder_decodeYUV(byte[] in, int offset, int length, int[] size) {
ByteBuffer buffer = decodeYUV(mHandle, in, offset, length, size);
return buffer;
}
public void decoder_releaseBuffer(ByteBuffer buffer) {
releaseYUV(buffer);
}
public void decoder_decodeBuffer(ByteBuffer buffer, int width, int height) {
decodeYUV2(mHandle, buffer, width, height);
}
public void decoder_close() {
if (mHandle == 0) {
return;
}
close(mHandle);
mHandle = 0;
}
public static class VideoDecoderLite extends VideoCodec {
private int[] mSize;
private Object surface;
public void create(Object surface, boolean h264) {
// if (surface == null) {
// throw new NullPointerException("surface is null!");
// }
this.surface = surface;
decoder_create(surface, h264 ? 0 : 1);
mSize = new int[2];
}
public void close() {
decoder_close();
}
protected int decodeFrame(Client.FrameInfo aFrame, int[] size) {
int nRet = 0;
nRet = decoder_decode(aFrame.buffer, aFrame.offset, aFrame.length, size);
return nRet;
}
protected ByteBuffer decodeFrameYUV(Client.FrameInfo aFrame, int[] size) {
return decoder_decodeYUV(aFrame.buffer, aFrame.offset, aFrame.length, size);
}
protected void releaseBuffer(ByteBuffer buffer) {
decoder_releaseBuffer(buffer);
}
}
}
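// Minimal sketch of decoding H.264 access units straight to a Surface with VideoCodec. The
// frame source is an assumption, and the proffmpeg / VideoCodecer native libraries must be
// present on the device.
import android.view.Surface;
import org.easydarwin.video.VideoCodec;

final class VideoCodecExample {
    static void decodeAll(Surface surface, byte[][] annexBFrames) {
        VideoCodec decoder = new VideoCodec();
        if (decoder.decoder_create(surface, VideoCodec.DECODER_H264) != 0) {
            return; // native decoder could not be created
        }
        int[] size = new int[2]; // filled with the decoded width/height
        for (byte[] frame : annexBFrames) {
            decoder.decoder_decode(frame, 0, frame.length, size);
        }
        decoder.decoder_close();
    }
}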

@ -0,0 +1,3 @@
<resources>
<string name="app_name">RTSPClient</string>
</resources>

@ -3,4 +3,4 @@ include ':library-push'
include ':library-ijkplayer'
include ':library-serialPort'
include ':library-common'
include ':library-vlc'
include ':library-rtsp'
