// NOTE(review): the modifiers and return type of this method are cut off by the
// diff chunk boundary; from the return statement it evidently returns a
// Pair of (sampleRate, channelCount) parsed from an ISO 14496-3 AudioSpecificConfig.
parseAacAudioSpecificConfig(byte[] audioSpecificConfig) {
    ParsableBitArray bitArray = new ParsableBitArray(audioSpecificConfig);
    // audioObjectType (5 bits), then samplingFrequencyIndex (4 bits).
    int audioObjectType = bitArray.readBits(5);
    int frequencyIndex = bitArray.readBits(4);
    int sampleRate;
    if (frequencyIndex == AUDIO_SPECIFIC_CONFIG_FREQUENCY_INDEX_ARBITRARY) {
        // Escape value: the sample rate is coded explicitly on the next 24 bits.
        sampleRate = bitArray.readBits(24);
    } else {
        sampleRate = AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE[frequencyIndex];
    }
    int channelConfiguration = bitArray.readBits(4);
    if (audioObjectType == AUDIO_OBJECT_TYPE_SBR || audioObjectType == AUDIO_OBJECT_TYPE_PS) {
        // For an AAC bitstream using spectral band replication (SBR) or parametric stereo (PS) with
        // explicit signaling, we return the extension sampling frequency as the sample rate of the
        // content; this is identical to the sample rate of the decoded output but may differ from
        // the sample rate set above.
        // Use the extensionSamplingFrequencyIndex.
        frequencyIndex = bitArray.readBits(4);
        if (frequencyIndex == AUDIO_SPECIFIC_CONFIG_FREQUENCY_INDEX_ARBITRARY) {
            sampleRate = bitArray.readBits(24);
        } else {
            sampleRate = AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE[frequencyIndex];
        }
        audioObjectType = bitArray.readBits(5);
        if (audioObjectType == AUDIO_OBJECT_TYPE_ER_BSAC) {
            // Use the extensionChannelConfiguration.
            channelConfiguration = bitArray.readBits(4);
        }
    }
    int channelCount = AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE[channelConfiguration];
    return Pair.create(sampleRate, channelCount);
}
+
+ /**
+ * Builds a simple HE-AAC LC AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1
+ *
+ * @param sampleRate The sample rate in Hz.
+ * @param numChannels The number of channels.
+ * @return The AudioSpecificConfig.
+ */
+ public static byte[] buildAacLcAudioSpecificConfig(int sampleRate, int numChannels) {
+ int sampleRateIndex = C.INDEX_UNSET;
+ for (int i = 0; i < AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE.length; ++i) {
+ if (sampleRate == AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE[i]) {
+ sampleRateIndex = i;
+ }
+ }
+ int channelConfig = C.INDEX_UNSET;
+ for (int i = 0; i < AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE.length; ++i) {
+ if (numChannels == AUDIO_SPECIFIC_CONFIG_CHANNEL_COUNT_TABLE[i]) {
+ channelConfig = i;
+ }
+ }
+ if (sampleRate == C.INDEX_UNSET || channelConfig == C.INDEX_UNSET) {
+ throw new IllegalArgumentException("Invalid sample rate or number of channels: "
+ + sampleRate + ", " + numChannels);
+ }
+ return buildAacAudioSpecificConfig(AUDIO_OBJECT_TYPE_AAC_LC, sampleRateIndex, channelConfig);
+ }
+
+ /**
+ * Builds a simple AudioSpecificConfig, as defined in ISO 14496-3 1.6.2.1
+ *
+ * @param audioObjectType The audio object type.
+ * @param sampleRateIndex The sample rate index.
+ * @param channelConfig The channel configuration.
+ * @return The AudioSpecificConfig.
+ */
+ public static byte[] buildAacAudioSpecificConfig(int audioObjectType, int sampleRateIndex,
+ int channelConfig) {
+ byte[] specificConfig = new byte[2];
+ specificConfig[0] = (byte) (((audioObjectType << 3) & 0xF8) | ((sampleRateIndex >> 1) & 0x07));
+ specificConfig[1] = (byte) (((sampleRateIndex << 7) & 0x80) | ((channelConfig << 3) & 0x78));
+ return specificConfig;
+ }
+
+ /**
+ * Constructs a NAL unit consisting of the NAL start code followed by the specified data.
+ *
+ * @param data An array containing the data that should follow the NAL start code.
+ * @param offset The start offset into {@code data}.
+ * @param length The number of bytes to copy from {@code data}
+ * @return The constructed NAL unit.
+ */
+ public static byte[] buildNalUnit(byte[] data, int offset, int length) {
+ byte[] nalUnit = new byte[length + NAL_START_CODE.length];
+ System.arraycopy(NAL_START_CODE, 0, nalUnit, 0, NAL_START_CODE.length);
+ System.arraycopy(data, offset, nalUnit, NAL_START_CODE.length, length);
+ return nalUnit;
+ }
+
+ /**
+ * Splits an array of NAL units.
+ *
+ * If the input consists of NAL start code delimited units, then the returned array consists of
+ * the split NAL units, each of which is still prefixed with the NAL start code. For any other
+ * input, null is returned.
+ *
+ * @param data An array of data.
+ * @return The individual NAL units, or null if the input did not consist of NAL start code
+ * delimited units.
+ */
+ public static byte[][] splitNalUnits(byte[] data) {
+ if (!isNalStartCode(data, 0)) {
+ // data does not consist of NAL start code delimited units.
+ return null;
+ }
+ List starts = new ArrayList<>();
+ int nalUnitIndex = 0;
+ do {
+ starts.add(nalUnitIndex);
+ nalUnitIndex = findNalStartCode(data, nalUnitIndex + NAL_START_CODE.length);
+ } while (nalUnitIndex != C.INDEX_UNSET);
+ byte[][] split = new byte[starts.size()][];
+ for (int i = 0; i < starts.size(); i++) {
+ int startIndex = starts.get(i);
+ int endIndex = i < starts.size() - 1 ? starts.get(i + 1) : data.length;
+ byte[] nal = new byte[endIndex - startIndex];
+ System.arraycopy(data, startIndex, nal, 0, nal.length);
+ split[i] = nal;
+ }
+ return split;
+ }
+
+ /**
+ * Finds the next occurrence of the NAL start code from a given index.
+ *
+ * @param data The data in which to search.
+ * @param index The first index to test.
+ * @return The index of the first byte of the found start code, or {@link C#INDEX_UNSET}.
+ */
+ private static int findNalStartCode(byte[] data, int index) {
+ int endIndex = data.length - NAL_START_CODE.length;
+ for (int i = index; i <= endIndex; i++) {
+ if (isNalStartCode(data, i)) {
+ return i;
+ }
+ }
+ return C.INDEX_UNSET;
+ }
+
+ /**
+ * Tests whether there exists a NAL start code at a given index.
+ *
+ * @param data The data.
+ * @param index The index to test.
+ * @return Whether there exists a start code that begins at {@code index}.
+ */
+ private static boolean isNalStartCode(byte[] data, int index) {
+ if (data.length - index <= NAL_START_CODE.length) {
+ return false;
+ }
+ for (int j = 0; j < NAL_START_CODE.length; j++) {
+ if (data[index + j] != NAL_START_CODE[j]) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+
+ /**
+ * Returns a color format that is supported by the codec and by this test code. If no
+ * match is found, this throws a test failure -- the set of formats known to the test
+ * should be expanded for new platforms.
+ */
+ public static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
+ MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
+ for (int i = 0; i < capabilities.colorFormats.length; i++) {
+ int colorFormat = capabilities.colorFormats[i];
+ if (isRecognizedFormat(colorFormat)) {
+ return colorFormat;
+ }
+ }
+ return 0; // not reached
+ }
+ /**
+ * Returns true if this is a color format that this test code understands (i.e. we know how
+ * to read and generate frames in this format).
+ */
+ private static boolean isRecognizedFormat(int colorFormat) {
+ switch (colorFormat) {
+ // these are the formats we know how to handle for this test
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
+ case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
+ case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
+ return true;
+ default:
+ return false;
+ }
+ }
+}
diff --git a/library-rtsp/src/main/java/org/easydarwin/util/ParsableBitArray.java b/library-rtsp/src/main/java/org/easydarwin/util/ParsableBitArray.java
new file mode 100644
index 0000000..5f71c45
--- /dev/null
+++ b/library-rtsp/src/main/java/org/easydarwin/util/ParsableBitArray.java
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.easydarwin.util;
+
/**
 * Wraps a byte array, providing methods that allow it to be read as a bitstream.
 */
public final class ParsableBitArray {

    public byte[] data;

    // The offset within the data, stored as the current byte offset, and the bit offset within that
    // byte (from 0 to 7).
    private int byteOffset;
    private int bitOffset;
    private int byteLimit;

    /**
     * Creates a new instance that initially has no backing data.
     */
    public ParsableBitArray() {}

    /**
     * Creates a new instance that wraps an existing array.
     *
     * @param data The data to wrap.
     */
    public ParsableBitArray(byte[] data) {
        this(data, data.length);
    }

    /**
     * Creates a new instance that wraps an existing array.
     *
     * @param data The data to wrap.
     * @param limit The limit in bytes.
     */
    public ParsableBitArray(byte[] data, int limit) {
        this.data = data;
        byteLimit = limit;
    }

    /**
     * Updates the instance to wrap {@code data}, and resets the position to zero.
     *
     * @param data The array to wrap.
     */
    public void reset(byte[] data) {
        reset(data, data.length);
    }

    /**
     * Updates the instance to wrap {@code data}, and resets the position to zero.
     *
     * @param data The array to wrap.
     * @param limit The limit in bytes.
     */
    public void reset(byte[] data, int limit) {
        this.data = data;
        byteOffset = 0;
        bitOffset = 0;
        byteLimit = limit;
    }

    /**
     * Returns the number of bits yet to be read.
     */
    public int bitsLeft() {
        return (byteLimit - byteOffset) * 8 - bitOffset;
    }

    /**
     * Returns the current bit offset.
     */
    public int getPosition() {
        return byteOffset * 8 + bitOffset;
    }

    /**
     * Sets the current bit offset.
     *
     * @param position The position to set.
     * @throws IllegalStateException If the position is neither in nor at the end of the data.
     */
    public void setPosition(int position) {
        byteOffset = position / 8;
        bitOffset = position - (byteOffset * 8);
        assertValidOffset();
    }

    /**
     * Skips bits and moves current reading position forward.
     *
     * @param n The number of bits to skip.
     * @throws IllegalStateException If the new position is neither in nor at the end of the data.
     */
    public void skipBits(int n) {
        byteOffset += (n / 8);
        bitOffset += (n % 8);
        if (bitOffset > 7) {
            byteOffset++;
            bitOffset -= 8;
        }
        assertValidOffset();
    }

    /**
     * Reads a single bit.
     *
     * @return Whether the bit is set.
     */
    public boolean readBit() {
        return readBits(1) == 1;
    }

    /**
     * Reads up to 32 bits.
     *
     * @param numBits The number of bits to read.
     * @return An integer whose bottom n bits hold the read data.
     */
    public int readBits(int numBits) {
        if (numBits == 0) {
            return 0;
        }

        int returnValue = 0;

        // Read as many whole bytes as we can.
        int wholeBytes = (numBits / 8);
        for (int i = 0; i < wholeBytes; i++) {
            int byteValue;
            if (bitOffset != 0) {
                // The byte spans two array entries: take the low bits of the current byte and the
                // high bits of the next one.
                byteValue = ((data[byteOffset] & 0xFF) << bitOffset)
                        | ((data[byteOffset + 1] & 0xFF) >>> (8 - bitOffset));
            } else {
                byteValue = data[byteOffset];
            }
            numBits -= 8;
            returnValue |= (byteValue & 0xFF) << numBits;
            byteOffset++;
        }

        // Read any remaining bits.
        if (numBits > 0) {
            int nextBit = bitOffset + numBits;
            byte writeMask = (byte) (0xFF >> (8 - numBits));

            if (nextBit > 8) {
                // Combine bits from current byte and next byte.
                returnValue |= ((((data[byteOffset] & 0xFF) << (nextBit - 8)
                        | ((data[byteOffset + 1] & 0xFF) >> (16 - nextBit))) & writeMask));
                byteOffset++;
            } else {
                // Bits to be read only within current byte.
                returnValue |= (((data[byteOffset] & 0xFF) >> (8 - nextBit)) & writeMask);
                if (nextBit == 8) {
                    byteOffset++;
                }
            }

            bitOffset = nextBit % 8;
        }

        assertValidOffset();
        return returnValue;
    }

    // Fix: the original method body was empty (comment only), so an out-of-range position produced
    // silent corruption or a confusing ArrayIndexOutOfBoundsException later. Enforce the invariant.
    private void assertValidOffset() {
        // It is fine for position to be at the end of the array, but no further.
        if (byteOffset < 0
                || byteOffset > byteLimit
                || (byteOffset == byteLimit && bitOffset != 0)) {
            throw new IllegalStateException(
                    "Invalid offset " + byteOffset + "." + bitOffset + " (limit " + byteLimit + ")");
        }
    }

}
diff --git a/library-rtsp/src/main/java/org/easydarwin/util/ParsableByteArray.java b/library-rtsp/src/main/java/org/easydarwin/util/ParsableByteArray.java
new file mode 100644
index 0000000..a0b41c3
--- /dev/null
+++ b/library-rtsp/src/main/java/org/easydarwin/util/ParsableByteArray.java
@@ -0,0 +1,514 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.easydarwin.util;
+
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+
+/**
+ * Wraps a byte array, providing a set of methods for parsing data from it. Numerical values are
+ * parsed with the assumption that their constituent bytes are in big endian order.
+ */
+public final class ParsableByteArray {
+
+ public byte[] data;
+
+ private int position;
+ private int limit;
+
+ /**
+ * Creates a new instance that initially has no backing data.
+ */
+ public ParsableByteArray() {}
+
+ /**
+ * Creates a new instance with {@code limit} bytes and sets the limit.
+ *
+ * @param limit The limit to set.
+ */
+ public ParsableByteArray(int limit) {
+ this.data = new byte[limit];
+ this.limit = limit;
+ }
+
+ /**
+ * Creates a new instance wrapping {@code data}, and sets the limit to {@code data.length}.
+ *
+ * @param data The array to wrap.
+ */
+ public ParsableByteArray(byte[] data) {
+ this.data = data;
+ limit = data.length;
+ }
+
+ /**
+ * Creates a new instance that wraps an existing array.
+ *
+ * @param data The data to wrap.
+ * @param limit The limit to set.
+ */
+ public ParsableByteArray(byte[] data, int limit) {
+ this.data = data;
+ this.limit = limit;
+ }
+
+ /**
+ * Resets the position to zero and the limit to the specified value. If the limit exceeds the
+ * capacity, {@code data} is replaced with a new array of sufficient size.
+ *
+ * @param limit The limit to set.
+ */
+ public void reset(int limit) {
+ reset(capacity() < limit ? new byte[limit] : data, limit);
+ }
+
+ /**
+ * Updates the instance to wrap {@code data}, and resets the position to zero.
+ *
+ * @param data The array to wrap.
+ * @param limit The limit to set.
+ */
+ public void reset(byte[] data, int limit) {
+ this.data = data;
+ this.limit = limit;
+ position = 0;
+ }
+
+ /**
+ * Sets the position and limit to zero.
+ */
+ public void reset() {
+ position = 0;
+ limit = 0;
+ }
+
+ /**
+ * Returns the number of bytes yet to be read.
+ */
+ public int bytesLeft() {
+ return limit - position;
+ }
+
+ /**
+ * Returns the limit.
+ */
+ public int limit() {
+ return limit;
+ }
+
+ /**
+ * Sets the limit.
+ *
+ * @param limit The limit to set.
+ */
+ public void setLimit(int limit) {
+// Assertions.checkArgument(limit >= 0 && limit <= data.length);
+ this.limit = limit;
+ }
+
+ /**
+ * Returns the current offset in the array, in bytes.
+ */
+ public int getPosition() {
+ return position;
+ }
+
+ /**
+ * Returns the capacity of the array, which may be larger than the limit.
+ */
+ public int capacity() {
+ return data == null ? 0 : data.length;
+ }
+
+ /**
+ * Sets the reading offset in the array.
+ *
+ * @param position Byte offset in the array from which to read.
+ * @throws IllegalArgumentException Thrown if the new position is neither in nor at the end of the
+ * array.
+ */
+ public void setPosition(int position) {
+ // It is fine for position to be at the end of the array.
+// Assertions.checkArgument(position >= 0 && position <= limit);
+ this.position = position;
+ }
+
+ /**
+ * Moves the reading offset by {@code bytes}.
+ *
+ * @param bytes The number of bytes to skip.
+ * @throws IllegalArgumentException Thrown if the new position is neither in nor at the end of the
+ * array.
+ */
+ public void skipBytes(int bytes) {
+ setPosition(position + bytes);
+ }
+
+ /**
+ * Reads the next {@code length} bytes into {@code bitArray}, and resets the position of
+ * {@code bitArray} to zero.
+ *
+ * @param bitArray The {@link ParsableBitArray} into which the bytes should be read.
+ * @param length The number of bytes to write.
+ */
+ public void readBytes(ParsableBitArray bitArray, int length) {
+ readBytes(bitArray.data, 0, length);
+ bitArray.setPosition(0);
+ }
+
+ /**
+ * Reads the next {@code length} bytes into {@code buffer} at {@code offset}.
+ *
+ * @see System#arraycopy(Object, int, Object, int, int)
+ * @param buffer The array into which the read data should be written.
+ * @param offset The offset in {@code buffer} at which the read data should be written.
+ * @param length The number of bytes to read.
+ */
+ public void readBytes(byte[] buffer, int offset, int length) {
+ System.arraycopy(data, position, buffer, offset, length);
+ position += length;
+ }
+
+ /**
+ * Reads the next {@code length} bytes into {@code buffer}.
+ *
+ * @see ByteBuffer#put(byte[], int, int)
+ * @param buffer The {@link ByteBuffer} into which the read data should be written.
+ * @param length The number of bytes to read.
+ */
+ public void readBytes(ByteBuffer buffer, int length) {
+ buffer.put(data, position, length);
+ position += length;
+ }
+
+ /**
+ * Peeks at the next byte as an unsigned value.
+ */
+ public int peekUnsignedByte() {
+ return (data[position] & 0xFF);
+ }
+
+ /**
+ * Reads the next byte as an unsigned value.
+ */
+ public int readUnsignedByte() {
+ return (data[position++] & 0xFF);
+ }
+
+ /**
+ * Reads the next two bytes as an unsigned value.
+ */
+ public int readUnsignedShort() {
+ return (data[position++] & 0xFF) << 8
+ | (data[position++] & 0xFF);
+ }
+
+ /**
+ * Reads the next two bytes as an unsigned value.
+ */
+ public int readLittleEndianUnsignedShort() {
+ return (data[position++] & 0xFF) | (data[position++] & 0xFF) << 8;
+ }
+
+ /**
+ * Reads the next two bytes as an signed value.
+ */
+ public short readShort() {
+ return (short) ((data[position++] & 0xFF) << 8
+ | (data[position++] & 0xFF));
+ }
+
+ /**
+ * Reads the next two bytes as a signed value.
+ */
+ public short readLittleEndianShort() {
+ return (short) ((data[position++] & 0xFF) | (data[position++] & 0xFF) << 8);
+ }
+
+ /**
+ * Reads the next three bytes as an unsigned value.
+ */
+ public int readUnsignedInt24() {
+ return (data[position++] & 0xFF) << 16
+ | (data[position++] & 0xFF) << 8
+ | (data[position++] & 0xFF);
+ }
+
+ /**
+ * Reads the next three bytes as a signed value in little endian order.
+ */
+ public int readLittleEndianInt24() {
+ return (data[position++] & 0xFF)
+ | (data[position++] & 0xFF) << 8
+ | (data[position++] & 0xFF) << 16;
+ }
+
+ /**
+ * Reads the next three bytes as an unsigned value in little endian order.
+ */
+ public int readLittleEndianUnsignedInt24() {
+ return (data[position++] & 0xFF)
+ | (data[position++] & 0xFF) << 8
+ | (data[position++] & 0xFF) << 16;
+ }
+
+ /**
+ * Reads the next four bytes as an unsigned value.
+ */
+ public long readUnsignedInt() {
+ return (data[position++] & 0xFFL) << 24
+ | (data[position++] & 0xFFL) << 16
+ | (data[position++] & 0xFFL) << 8
+ | (data[position++] & 0xFFL);
+ }
+
+ /**
+ * Reads the next four bytes as an unsigned value in little endian order.
+ */
+ public long readLittleEndianUnsignedInt() {
+ return (data[position++] & 0xFFL)
+ | (data[position++] & 0xFFL) << 8
+ | (data[position++] & 0xFFL) << 16
+ | (data[position++] & 0xFFL) << 24;
+ }
+
+ /**
+ * Reads the next four bytes as a signed value
+ */
+ public int readInt() {
+ return (data[position++] & 0xFF) << 24
+ | (data[position++] & 0xFF) << 16
+ | (data[position++] & 0xFF) << 8
+ | (data[position++] & 0xFF);
+ }
+
+ /**
+ * Reads the next four bytes as an signed value in little endian order.
+ */
+ public int readLittleEndianInt() {
+ return (data[position++] & 0xFF)
+ | (data[position++] & 0xFF) << 8
+ | (data[position++] & 0xFF) << 16
+ | (data[position++] & 0xFF) << 24;
+ }
+
+ /**
+ * Reads the next eight bytes as a signed value.
+ */
+ public long readLong() {
+ return (data[position++] & 0xFFL) << 56
+ | (data[position++] & 0xFFL) << 48
+ | (data[position++] & 0xFFL) << 40
+ | (data[position++] & 0xFFL) << 32
+ | (data[position++] & 0xFFL) << 24
+ | (data[position++] & 0xFFL) << 16
+ | (data[position++] & 0xFFL) << 8
+ | (data[position++] & 0xFFL);
+ }
+
+ /**
+ * Reads the next eight bytes as a signed value in little endian order.
+ */
+ public long readLittleEndianLong() {
+ return (data[position++] & 0xFFL)
+ | (data[position++] & 0xFFL) << 8
+ | (data[position++] & 0xFFL) << 16
+ | (data[position++] & 0xFFL) << 24
+ | (data[position++] & 0xFFL) << 32
+ | (data[position++] & 0xFFL) << 40
+ | (data[position++] & 0xFFL) << 48
+ | (data[position++] & 0xFFL) << 56;
+ }
+
+ /**
+ * Reads the next four bytes, returning the integer portion of the fixed point 16.16 integer.
+ */
+ public int readUnsignedFixedPoint1616() {
+ int result = (data[position++] & 0xFF) << 8
+ | (data[position++] & 0xFF);
+ position += 2; // Skip the non-integer portion.
+ return result;
+ }
+
+ /**
+ * Reads a Synchsafe integer.
+ *
+ * Synchsafe integers keep the highest bit of every byte zeroed. A 32 bit synchsafe integer can
+ * store 28 bits of information.
+ *
+ * @return The parsed value.
+ */
+ public int readSynchSafeInt() {
+ int b1 = readUnsignedByte();
+ int b2 = readUnsignedByte();
+ int b3 = readUnsignedByte();
+ int b4 = readUnsignedByte();
+ return (b1 << 21) | (b2 << 14) | (b3 << 7) | b4;
+ }
+
+ /**
+ * Reads the next four bytes as an unsigned integer into an integer, if the top bit is a zero.
+ *
+ * @throws IllegalStateException Thrown if the top bit of the input data is set.
+ */
+ public int readUnsignedIntToInt() {
+ int result = readInt();
+ if (result < 0) {
+ throw new IllegalStateException("Top bit not zero: " + result);
+ }
+ return result;
+ }
+
+ /**
+ * Reads the next four bytes as a little endian unsigned integer into an integer, if the top bit
+ * is a zero.
+ *
+ * @throws IllegalStateException Thrown if the top bit of the input data is set.
+ */
+ public int readLittleEndianUnsignedIntToInt() {
+ int result = readLittleEndianInt();
+ if (result < 0) {
+ throw new IllegalStateException("Top bit not zero: " + result);
+ }
+ return result;
+ }
+
+ /**
+ * Reads the next eight bytes as an unsigned long into a long, if the top bit is a zero.
+ *
+ * @throws IllegalStateException Thrown if the top bit of the input data is set.
+ */
+ public long readUnsignedLongToLong() {
+ long result = readLong();
+ if (result < 0) {
+ throw new IllegalStateException("Top bit not zero: " + result);
+ }
+ return result;
+ }
+
+ /**
+ * Reads the next four bytes as a 32-bit floating point value.
+ */
+ public float readFloat() {
+ return Float.intBitsToFloat(readInt());
+ }
+
+ /**
+ * Reads the next eight bytes as a 64-bit floating point value.
+ */
+ public double readDouble() {
+ return Double.longBitsToDouble(readLong());
+ }
+
+ /**
+ * Reads the next {@code length} bytes as UTF-8 characters.
+ *
+ * @param length The number of bytes to read.
+ * @return The string encoded by the bytes.
+ */
+ public String readString(int length) {
+ return readString(length, Charset.defaultCharset());
+ }
+
+ /**
+ * Reads the next {@code length} bytes as characters in the specified {@link Charset}.
+ *
+ * @param length The number of bytes to read.
+ * @param charset The character set of the encoded characters.
+ * @return The string encoded by the bytes in the specified character set.
+ */
+ public String readString(int length, Charset charset) {
+ String result = new String(data, position, length, charset);
+ position += length;
+ return result;
+ }
+
+ /**
+ * Reads a line of text.
+ *
+ * A line is considered to be terminated by any one of a carriage return ('\r'), a line feed
+ * ('\n'), or a carriage return followed immediately by a line feed ('\r\n'). The system's default
+ * charset (UTF-8) is used.
+ *
+ * @return A String containing the contents of the line, not including any line-termination
+ * characters, or null if the end of the stream has been reached.
+ */
+ public String readLine() {
+ if (bytesLeft() == 0) {
+ return null;
+ }
+ int lineLimit = position;
+ while (lineLimit < limit && data[lineLimit] != '\n' && data[lineLimit] != '\r') {
+ lineLimit++;
+ }
+ if (lineLimit - position >= 3 && data[position] == (byte) 0xEF
+ && data[position + 1] == (byte) 0xBB && data[position + 2] == (byte) 0xBF) {
+ // There's a byte order mark at the start of the line. Discard it.
+ position += 3;
+ }
+ String line = new String(data, position, lineLimit - position);
+ position = lineLimit;
+ if (position == limit) {
+ return line;
+ }
+ if (data[position] == '\r') {
+ position++;
+ if (position == limit) {
+ return line;
+ }
+ }
+ if (data[position] == '\n') {
+ position++;
+ }
+ return line;
+ }
+
+ /**
+ * Reads a long value encoded by UTF-8 encoding
+ *
+ * @throws NumberFormatException if there is a problem with decoding
+ * @return Decoded long value
+ */
+ public long readUtf8EncodedLong() {
+ int length = 0;
+ long value = data[position];
+ // find the high most 0 bit
+ for (int j = 7; j >= 0; j--) {
+ if ((value & (1 << j)) == 0) {
+ if (j < 6) {
+ value &= (1 << j) - 1;
+ length = 7 - j;
+ } else if (j == 7) {
+ length = 1;
+ }
+ break;
+ }
+ }
+ if (length == 0) {
+ throw new NumberFormatException("Invalid UTF-8 sequence first byte: " + value);
+ }
+ for (int i = 1; i < length; i++) {
+ int x = data[position + i];
+ if ((x & 0xC0) != 0x80) { // if the high most 0 bit not 7th
+ throw new NumberFormatException("Invalid UTF-8 sequence continuation byte: " + value);
+ }
+ value = (value << 6) | (x & 0x3F);
+ }
+ position += length;
+ return value;
+ }
+
+}
diff --git a/library-rtsp/src/main/java/org/easydarwin/util/TextureLifecycler.java b/library-rtsp/src/main/java/org/easydarwin/util/TextureLifecycler.java
new file mode 100644
index 0000000..b9ca4be
--- /dev/null
+++ b/library-rtsp/src/main/java/org/easydarwin/util/TextureLifecycler.java
@@ -0,0 +1,60 @@
+package org.easydarwin.util;
+
+import android.graphics.SurfaceTexture;
+import android.view.TextureView;
+
+import androidx.annotation.NonNull;
+import androidx.lifecycle.Lifecycle;
+import androidx.lifecycle.LifecycleOwner;
+import androidx.lifecycle.LifecycleRegistry;
+
+import java.lang.ref.WeakReference;
+
+public class TextureLifecycler implements LifecycleOwner {
+ private LifecycleRegistry mLifecycleRegistry = new LifecycleRegistry(this);
+
+ @NonNull
+ @Override
+ public Lifecycle getLifecycle() {
+ return mLifecycleRegistry;
+ }
+
+ WeakReference mRef;
+
+ public TextureLifecycler(TextureView view) {
+ mRef = new WeakReference<>(view);
+ mLifecycleRegistry.markState(Lifecycle.State.INITIALIZED);
+ if (view.isAvailable()) {
+ mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_CREATE);
+ mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_START);
+ mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_RESUME);
+ }
+ view.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
+ @Override
+ public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
+ mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_CREATE);
+ mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_START);
+ mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_RESUME);
+ }
+
+ @Override
+ public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
+
+ }
+
+ @Override
+ public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
+ mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_PAUSE);
+ mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_STOP);
+ mLifecycleRegistry.handleLifecycleEvent(Lifecycle.Event.ON_DESTROY);
+ return true;
+ }
+
+ @Override
+ public void onSurfaceTextureUpdated(SurfaceTexture surface) {
+
+ }
+ });
+ }
+
+}
diff --git a/library-rtsp/src/main/java/org/easydarwin/video/Client.java b/library-rtsp/src/main/java/org/easydarwin/video/Client.java
new file mode 100644
index 0000000..421ca23
--- /dev/null
+++ b/library-rtsp/src/main/java/org/easydarwin/video/Client.java
@@ -0,0 +1,407 @@
+package org.easydarwin.video;
+
+import android.Manifest;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.os.Handler;
+import android.os.Looper;
+import android.util.Log;
+import android.util.SparseArray;
+
+import androidx.core.content.ContextCompat;
+
+import java.io.Closeable;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Created by John on 2016/3/12.
+ */
+public class Client implements Closeable {
+
+ private static int sKey;
+ private static Context mContext;
+ private volatile int paused = 0;
+ private static final Handler h = new Handler(Looper.getMainLooper());
+ private static Set _channelPause = new HashSet<>();
+ private final Runnable closeTask = new Runnable() {
+ @Override
+ public void run() {
+ if (paused > 0) {
+ Log.i(TAG, "realPause! close stream");
+ closeStream();
+ paused = 2;
+ }
+ }
+ };
+
+ private int _channel;
+ private String _url;
+ private int _type;
+ private int _mediaType;
+ private String _user;
+ private String _pwd;
+ private int _sendOption;
+
/** Plain data holder for one audio/video frame delivered by the native RTSP client. */
public static final class FrameInfo {
    public int codec; /* audio/video codec id */

    public int type; /* video frame type */
    public byte fps; /* video frame rate */
    public short width; /* video width */
    public short height; /* video height */

    public int reserved1; /* reserved parameter 1 */
    public int reserved2; /* reserved parameter 2 */

    public int sample_rate; /* audio sample rate */
    public int channels; /* audio channel count */
    public int bits_per_sample; /* audio sample precision */

    public int length; /* frame size in bytes */
    public long timestamp_usec; /* timestamp, microsecond part */
    public long timestamp_sec; /* timestamp, second part */

    // Combined timestamp in microseconds (sec * 1_000_000 + usec), filled in
    // by Client.onSourceCallBack.
    public long stamp;

    public float bitrate; /* bit rate */
    public float losspacket; /* packet loss rate */

    public byte[] buffer;   // raw frame payload
    public int offset = 0;  // start offset of the payload inside buffer
    public boolean audio;   // true for audio frames, false for video
}
+
/**
 * Stream-level media description parsed from the native MediaInfo struct in
 * {@code onSourceCallBack} (little-endian layout: video codec, fps, audio
 * codec, sample rate, channels, bits per sample, SPS length, PPS length,
 * then the SPS and PPS byte arrays).
 */
public static final class MediaInfo {
    // The original source carried a mis-encoded (mojibake) copy of the native
    // C struct declaration here; the fields below mirror that struct.

    int videoCodec;   // video codec id
    int fps;          // video frame rate
    int audioCodec;   // audio codec id
    int sample;       // audio sample rate
    int channel;      // audio channel count
    int bitPerSample; // audio sample precision
    int spsLen;       // H.264 SPS length
    int ppsLen;       // H.264 PPS length
    byte[] sps;       // SPS bytes (native buffer is 128 bytes)
    byte[] pps;       // PPS bytes (native buffer is 36 bytes)

    @Override
    public String toString() {
        return "MediaInfo{" +
                "videoCodec=" + videoCodec +
                ", fps=" + fps +
                ", audioCodec=" + audioCodec +
                ", sample=" + sample +
                ", channel=" + channel +
                ", bitPerSample=" + bitPerSample +
                ", spsLen=" + spsLen +
                ", ppsLen=" + ppsLen +
                '}';
    }
}
+
/** Receiver for frames, media descriptions, and connection events from the native client. */
public interface SourceCallBack {
    /** Delivers one frame; {@code frameInfo} is null for the keep-alive callback (frame type 0). */
    void onSourceCallBack(int _channelId, int _channelPtr, int _frameType, FrameInfo frameInfo);

    /** Delivers the parsed stream description once received from the server. */
    void onMediaInfoCallBack(int _channelId, MediaInfo mi);

    /** Connection state events (err is an HTTP-style status code, info a state id). */
    void onEvent(int _channelId, int err, int info);
}
+
+ public static final int EASY_SDK_VIDEO_FRAME_FLAG = 0x01;
+ public static final int EASY_SDK_AUDIO_FRAME_FLAG = 0x02;
+ public static final int EASY_SDK_EVENT_FRAME_FLAG = 0x04;
+ public static final int EASY_SDK_RTP_FRAME_FLAG = 0x08; /* RTP帧标志 */
+ public static final int EASY_SDK_SDP_FRAME_FLAG = 0x10; /* SDP帧标志 */
+ public static final int EASY_SDK_MEDIA_INFO_FLAG = 0x20; /* 媒体类型标志*/
+
+ public static final int EASY_SDK_EVENT_CODEC_ERROR = 0x63657272; /* ERROR */
+ public static final int EASY_SDK_EVENT_CODEC_EXIT = 0x65786974; /* EXIT */
+
+ public static final int TRANSTYPE_TCP = 1;
+ public static final int TRANSTYPE_UDP = 2;
+ private static final String TAG = Client.class.getSimpleName();
+
+ static {
+ System.loadLibrary("EasyRTSPClient");
+ }
+
+ private long mCtx;
+ private static final SparseArray sCallbacks = new SparseArray<>();
+
/**
 * Creates the native client context and caches the application context that
 * the static JNI callbacks use later.
 */
Client(Context context) {
    if (null == context) {
        throw new NullPointerException();
    }

    // Native side first, then remember the app context for the callbacks.
    mCtx = init(context, "");
    mContext = context.getApplicationContext();
}
+
+ int registerCallback(SourceCallBack cb) {
+ synchronized (sCallbacks) {
+ sCallbacks.put(++sKey, cb);
+ return sKey;
+ }
+ }
+
+ void unrigisterCallback(SourceCallBack cb) {
+ synchronized (sCallbacks) {
+ int idx = sCallbacks.indexOfValue(cb);
+ if (idx != -1) {
+ sCallbacks.removeAt(idx);
+ }
+ }
+ }
+
/** @return the last error code reported by the native client for this context. */
public int getLastErrorCode() {
    return getErrorCode(mCtx);
}
+
+ public int openStream(int channel, String url, int type, int sendOption, int mediaType, String user, String pwd) {
+ _channel = channel;
+ _url = url;
+ _type = type;
+ _mediaType = mediaType;
+ _user = user;
+ _pwd = pwd;
+ _sendOption = sendOption;
+ return openStream();
+ }
+
/** Cancels any pending delayed close and shuts down the native stream (if initialized). */
public void closeStream() {
    h.removeCallbacks(closeTask);

    if (mCtx != 0) {
        closeStream(mCtx);
    }
}
+
+ private static native int getErrorCode(long context);
+
+ private native long init(Context context, String key);
+
+ private native int deInit(long context);
+
/**
 * Opens the stream using the parameters cached by the public openStream():
 * reconnection count 1000, no raw RTP packet output.
 *
 * @throws NullPointerException  if no url has been set
 * @throws IllegalStateException if native init failed (message is Chinese for
 *                               "initialization failed, invalid KEY")
 */
private int openStream() {
    if (null == _url) {
        throw new NullPointerException();
    }

    if (mCtx == 0) {
        throw new IllegalStateException("初始化失败,KEY不合法");
    }

    return openStream(mCtx, _channel, _url, _type, _mediaType, _user, _pwd, 1000, 0, _sendOption);
}
+
+ private native int openStream(long context, int channel, String url, int type, int mediaType, String user, String pwd, int reconn, int outRtpPacket, int rtspOption);
+
+// private native int startRecord(int context, String path);
+//
+// private native void stopRecord(int context);
+
+ private native void closeStream(long context);
+
+ private static void save2path(byte[] buffer, int offset, int length, String path, boolean append) {
+ FileOutputStream fos = null;
+
+ try {
+ fos = new FileOutputStream(path, append);
+ fos.write(buffer, offset, length);
+ } catch (FileNotFoundException e) {
+ e.printStackTrace();
+ } catch (IOException e) {
+ e.printStackTrace();
+ } finally {
+ if (fos != null) {
+ try {
+ fos.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
+ private static void onSourceCallBack(int _channelId, int _channelPtr, int _frameType, byte[] pBuf, byte[] frameBuffer) {
+ if (BuildConfig.MEDIA_DEBUG) {
+
+ int permissionCheck = ContextCompat.checkSelfPermission(mContext, Manifest.permission.WRITE_EXTERNAL_STORAGE);
+ if (permissionCheck == PackageManager.PERMISSION_GRANTED) {
+ // frameType + size + buffer
+ if (_frameType != 0) {
+ ByteBuffer bf = ByteBuffer.allocate(5);
+ bf.put((byte) _frameType);
+ if (_frameType == EASY_SDK_MEDIA_INFO_FLAG) {
+ bf.putInt(pBuf.length);
+ save2path(bf.array(), 0, 5, "/sdcard/media_degbu.data", true);
+ save2path(pBuf, 0, pBuf.length, "/sdcard/media_degbu.data", true);
+ } else {
+ bf.putInt(frameBuffer.length);
+ save2path(bf.array(), 0, 5, "/sdcard/media_degbu.data", true);
+ save2path(frameBuffer, 0, frameBuffer.length, "/sdcard/media_degbu.data", true);
+ }
+ }
+ }
+ }
+
+ final SourceCallBack callBack;
+ synchronized (sCallbacks) {
+ callBack = sCallbacks.get(_channelId);
+ }
+
+ if (_frameType == 0) {
+ if (callBack != null) {
+ callBack.onSourceCallBack(_channelId, _channelPtr, _frameType, null);
+ }
+ return;
+ }
+
+ if (_frameType == EASY_SDK_MEDIA_INFO_FLAG) {
+ if (callBack != null) {
+ MediaInfo mi = new MediaInfo();
+
+ ByteBuffer buffer = ByteBuffer.wrap(pBuf);
+ buffer.order(ByteOrder.LITTLE_ENDIAN);
+ mi.videoCodec = buffer.getInt();
+ mi.fps = buffer.getInt();
+ mi.audioCodec = buffer.getInt();
+ mi.sample = buffer.getInt();
+ mi.channel = buffer.getInt();
+ mi.bitPerSample = buffer.getInt();
+ mi.spsLen = buffer.getInt();
+ mi.ppsLen = buffer.getInt();
+ mi.sps = new byte[128];
+ mi.pps = new byte[36];
+
+ buffer.get(mi.sps);
+ buffer.get(mi.pps);
+// int videoCodec;int fps;
+// int audioCodec;int sample;int channel;int bitPerSample;
+// int spsLen;
+// int ppsLen;
+// byte[]sps;
+// byte[]pps;
+
+ callBack.onMediaInfoCallBack(_channelId, mi);
+ }
+ return;
+ }
+
+ ByteBuffer buffer = ByteBuffer.wrap(frameBuffer);
+ buffer.order(ByteOrder.LITTLE_ENDIAN);
+ FrameInfo fi = new FrameInfo();
+ fi.codec = buffer.getInt();
+ fi.type = buffer.getInt();
+ fi.fps = buffer.get();
+ buffer.get();
+ fi.width = buffer.getShort();
+ fi.height = buffer.getShort();
+ buffer.getInt();
+ buffer.getInt();
+ buffer.getShort();
+ fi.sample_rate = buffer.getInt();
+ fi.channels = buffer.getInt();
+ fi.bits_per_sample = buffer.getInt();
+ fi.length = buffer.getInt();
+ fi.timestamp_usec = buffer.getInt();
+ fi.timestamp_sec = buffer.getInt();
+
+ long sec = fi.timestamp_sec < 0 ? Integer.MAX_VALUE - Integer.MIN_VALUE + 1 + fi.timestamp_sec : fi.timestamp_sec;
+ long usec = fi.timestamp_usec < 0 ? Integer.MAX_VALUE - Integer.MIN_VALUE + 1 + fi.timestamp_usec : fi.timestamp_usec;
+ fi.stamp = sec * 1000000 + usec;
+
+// long differ = fi.stamp - mPreviewStamp;
+// Log.d(TAG, String.format("%s:%d,%d,%d, %d", EASY_SDK_VIDEO_FRAME_FLAG == _frameType ? "视频" : "音频", fi.stamp, fi.timestamp_sec, fi.timestamp_usec, differ));
+ fi.buffer = pBuf;
+
+ boolean paused = false;
+ synchronized (_channelPause) {
+ paused = _channelPause.contains(_channelId);
+ }
+
+ if (callBack != null) {
+ if (paused) {
+ Log.i(TAG, "channel_" + _channelId + " is paused!");
+ }
+ callBack.onSourceCallBack(_channelId, _channelPtr, _frameType, fi);
+ }
+ }
+
+ private static void onEvent(int channel, int err, int state) {
+ // state:1 Connecting, 2 连接错误, 3 连接线程退出
+ // err的含义:http请求的返回码(200,400,401等等)
+ Log.e(TAG, String.format("__RTSPClientCallBack onEvent: err=%d, state=%d", err, state));
+
+ synchronized (sCallbacks) {
+ final SourceCallBack callBack = sCallbacks.get(channel);
+ if (callBack != null) {
+ callBack.onEvent(channel, err, state);
+ }
+ }
+ }
+
+ public void pause() {
+ if (Looper.myLooper() != Looper.getMainLooper()) {
+ throw new IllegalThreadStateException("please call pause in Main thread!");
+ }
+
+ synchronized (_channelPause) {
+ _channelPause.add(_channel);
+ }
+
+ paused = 1;
+ Log.i(TAG, "pause:=" + 1);
+ h.postDelayed(closeTask, 10000);
+ }
+
/**
 * Un-pauses this channel. Must be called on the main thread. Cancels the
 * pending delayed close; if the close task already really closed the stream
 * (paused == 2) it is re-opened with the cached parameters.
 */
public void resume() {
    if (Looper.myLooper() != Looper.getMainLooper()) {
        throw new IllegalThreadStateException("call resume in Main thread!");
    }

    synchronized (_channelPause) {
        _channelPause.remove(_channel);
    }

    h.removeCallbacks(closeTask);

    if (paused == 2) {
        Log.i(TAG, "resume:=" + 0);
        openStream();
    }

    // NOTE(review): duplicates the log line above when paused == 2.
    Log.i(TAG, "resume:=" + 0);
    paused = 0;
}
+
/**
 * Releases the native context; also cancels any pending delayed close and
 * clears this channel's pause flag.
 *
 * @throws IOException if the client was never opened or is already closed
 */
@Override
public void close() throws IOException {
    h.removeCallbacks(closeTask);
    _channelPause.remove(_channel);

    if (mCtx == 0)
        throw new IOException("not opened or already closed");

    deInit(mCtx);
    mCtx = 0;
}
+}
diff --git a/library-rtsp/src/main/java/org/easydarwin/video/EasyMuxer.java b/library-rtsp/src/main/java/org/easydarwin/video/EasyMuxer.java
new file mode 100644
index 0000000..38dda84
--- /dev/null
+++ b/library-rtsp/src/main/java/org/easydarwin/video/EasyMuxer.java
@@ -0,0 +1,196 @@
+package org.easydarwin.video;
+
+import android.annotation.TargetApi;
+import android.media.MediaCodec;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.os.Build;
+import android.os.SystemClock;
+import android.text.TextUtils;
+import android.util.Log;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.security.InvalidParameterException;
+
+import static android.media.MediaCodec.BUFFER_FLAG_KEY_FRAME;
+
+/**
+ * Created by John on 2017/1/10.
+ */
+
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public class EasyMuxer {
+
+ public static final boolean VERBOSE = true;
+ private static final String TAG = EasyMuxer.class.getSimpleName();
+ private final String mFilePath;
+ private boolean hasAudio;
+ private MediaMuxer mMuxer;
+ private final long durationMillis;
+ private int index = 0;
+ private int mVideoTrackIndex = -1;
+ private int mAudioTrackIndex = -1;
+ private long mBeginMillis = 0L;
+ private MediaFormat mVideoFormat;
+ private MediaFormat mAudioFormat;
+
+
+ private long video_stample = 0;
+ private long audio_stample = 0;
+
+ public EasyMuxer(String path, boolean hasAudio, long durationMillis) {
+ if (TextUtils.isEmpty(path)){
+ throw new InvalidParameterException("path should not be empty!");
+ }
+ if (path.toLowerCase().endsWith(".mp4")){
+ path = path.substring(0, path.toLowerCase().lastIndexOf(".mp4"));
+ }
+ mFilePath = path;
+ this.hasAudio = hasAudio;
+ this.durationMillis = durationMillis;
+ Object mux = null;
+ try {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
+ mux = new MediaMuxer(path + "-" + index++ + ".mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ } finally {
+ mMuxer = (MediaMuxer) mux;
+ }
+ }
/**
 * Registers the video or audio track and starts the muxer once every
 * expected track is present (video only when {@code hasAudio} is false).
 * The format is cached so follow-up segment files can re-add it.
 *
 * @throws RuntimeException if both tracks were already added
 */
public synchronized void addTrack(MediaFormat format, boolean isVideo) {
    // now that we have the Magic Goodies, start the muxer
    if (mAudioTrackIndex != -1 && mVideoTrackIndex != -1)
        throw new RuntimeException("already add all tracks");
    int track = mMuxer.addTrack(format);
    if (VERBOSE)
        Log.i(TAG, String.format("addTrack %s result %d", isVideo ? "video" : "audio", track));
    if (isVideo) {
        mVideoFormat = format;
        mVideoTrackIndex = track;
        // Start as soon as the audio track is in, or right away for video-only.
        if (mAudioTrackIndex != -1 || !hasAudio) {
            if (VERBOSE)
                Log.i(TAG, "both audio and video added,and muxer is started");
            mMuxer.start();
        }
    } else {
        mAudioFormat = format;
        mAudioTrackIndex = track;
        if (mVideoTrackIndex != -1) {
            mMuxer.start();
        }
    }
}
+
/**
 * Writes one encoded sample into the current segment file.
 *
 * Samples are silently dropped until the muxer has all its tracks and, for
 * video, until the first key frame arrives (each segment must start on a key
 * frame). Once {@code durationMillis} has elapsed, the next video key frame
 * triggers a rollover: the current muxer is finalized and a new
 * "&lt;path&gt;-&lt;index&gt;.mp4" file is started with the cached formats.
 */
public synchronized void pumpStream(ByteBuffer outputBuffer, MediaCodec.BufferInfo bufferInfo, boolean isVideo) {
    if (mMuxer == null) Log.w(TAG,"muxer is null!");
    if (mVideoTrackIndex == -1) {
        Log.i(TAG, String.format("pumpStream [%s] but muxer is not start.ignore..", isVideo ? "video" : "audio"));
        return;
    }
    if (mAudioTrackIndex == -1 && hasAudio) {
        Log.i(TAG, String.format("pumpStream [%s] but muxer is not start.ignore..", isVideo ? "video" : "audio"));
        return;
    }
    if (isVideo && mBeginMillis == 0L){ // the first frame must be a key frame
        if ((bufferInfo.flags & BUFFER_FLAG_KEY_FRAME) == 0){
            Log.i(TAG, String.format("pumpStream [%s] but key frame not GOTTEN.ignore..", isVideo ? "video" : "audio"));
            return;
        }
    }
    // Audio arriving before the first video key frame is dropped too.
    if (!isVideo && mBeginMillis == 0L){
        Log.i(TAG, String.format("pumpStream [%s] but video frame not GOTTEN.ignore..", isVideo ? "video" : "audio"));
        return;
    }
    // Reaching here with mBeginMillis == 0 means this is the starting video
    // key frame: record the segment start time.
    if (isVideo && mBeginMillis == 0L){
        mBeginMillis = SystemClock.elapsedRealtime();
    }
    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // The codec config data was pulled out and fed to the muxer when we got
        // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
    } else if (bufferInfo.size != 0) {
        if (isVideo && mVideoTrackIndex == -1) {
            throw new InvalidParameterException("muxer hasn't started");
        }

        // adjust the ByteBuffer values to match BufferInfo (not needed?)
        outputBuffer.position(bufferInfo.offset);
        outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
        if (VERBOSE)
            Log.d(TAG, String.format("sent %s [" + bufferInfo.size + "] with timestamp:[%d] to muxer", isVideo ? "video" : "audio", bufferInfo.presentationTimeUs / 1000));

        // MediaMuxer requires monotonically increasing timestamps per track;
        // drop any sample whose timestamp does not advance.
        if (isVideo){
            if (video_stample != 0){
                if (bufferInfo.presentationTimeUs - video_stample <= 0){
                    Log.w(TAG,"video timestample goback, ignore!");
                    return;
                }
                video_stample = bufferInfo.presentationTimeUs;
            }else{
                video_stample = bufferInfo.presentationTimeUs;
            }
        }else {
            if (audio_stample != 0){
                if (bufferInfo.presentationTimeUs - audio_stample <= 0){
                    Log.w(TAG,"audio timestample goback, ignore!");
                    return;
                }
                audio_stample = bufferInfo.presentationTimeUs;
            }else{
                audio_stample = bufferInfo.presentationTimeUs;
            }
        }
        mMuxer.writeSampleData(isVideo ? mVideoTrackIndex : mAudioTrackIndex, outputBuffer, bufferInfo);
    }

    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        if (VERBOSE)
            Log.i(TAG, "BUFFER_FLAG_END_OF_STREAM received");
    }

    // Segment rollover: duration reached and this sample is a video key frame.
    if (SystemClock.elapsedRealtime() - mBeginMillis >= durationMillis && isVideo && ((bufferInfo.flags & BUFFER_FLAG_KEY_FRAME) != 0)) {
        if (VERBOSE)
            Log.i(TAG, String.format("record file reach expiration.create new file:" + index));

        try {
            mMuxer.stop();
            mMuxer.release();
        }catch (Exception e){
            e.printStackTrace();
        }
        mMuxer = null;
        mVideoTrackIndex = mAudioTrackIndex = -1;
        try {
            mMuxer = new MediaMuxer(mFilePath + "-" + index++ + ".mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            addTrack(mVideoFormat, true);
            if (mAudioFormat != null) {
                addTrack(mAudioFormat, false);
            }
            mBeginMillis = 0L;
            // Re-enter once so this key frame lands in the new segment.
            pumpStream(outputBuffer, bufferInfo, isVideo);
        } catch (IOException e) {
            e.printStackTrace();
        }

    }
}
+
+ public synchronized void release() {
+ if (mMuxer != null) {
+ if (mVideoTrackIndex != -1 && (mAudioTrackIndex != -1 || !hasAudio)) {
+ if (VERBOSE)
+ Log.i(TAG, String.format("muxer is started. now it will be stoped."));
+ try {
+ mMuxer.stop();
+ mMuxer.release();
+ } catch (IllegalStateException ex) {
+ ex.printStackTrace();
+ }
+ }
+ mMuxer = null;
+ }
+ mBeginMillis = 0L;
+ }
+}
diff --git a/library-rtsp/src/main/java/org/easydarwin/video/EasyMuxer2.java b/library-rtsp/src/main/java/org/easydarwin/video/EasyMuxer2.java
new file mode 100644
index 0000000..311926c
--- /dev/null
+++ b/library-rtsp/src/main/java/org/easydarwin/video/EasyMuxer2.java
@@ -0,0 +1,29 @@
+package org.easydarwin.video;
+
+import java.lang.annotation.Native;
+
+/**
+ * Created by John on 2017/1/10.
+ */
+
/**
 * Thin JNI wrapper around a native ffmpeg-based MP4 muxer
 * (libproffmpeg / libVideoCodecer).
 */
public class EasyMuxer2 {

    static {
        // Load the native muxer libraries once per process.
        System.loadLibrary("proffmpeg");
        System.loadLibrary("VideoCodecer");
    }
    /** Stream type for {@link #writeFrame}: video. */
    public static final int AVMEDIA_TYPE_VIDEO = 0;
    /** Stream type for {@link #writeFrame}: audio. */
    public static final int AVMEDIA_TYPE_AUDIO = 1;


    public static final int VIDEO_TYPE_H264 = 0;
    public static final int VIDEO_TYPE_H265 = 1;
    // Native context pointer; accessed from JNI only (hence @Native).
    @Native
    private long ctx;

    public native int create(String path, int videoType, int width, int height, byte[] extra, int sample, int channel);

    public native int writeFrame(int streamType, byte[] frame, int offset, int length, long timeStampMillis);

    public native void close();
}
diff --git a/library-rtsp/src/main/java/org/easydarwin/video/EasyPlayerClient.java b/library-rtsp/src/main/java/org/easydarwin/video/EasyPlayerClient.java
new file mode 100644
index 0000000..31bdad3
--- /dev/null
+++ b/library-rtsp/src/main/java/org/easydarwin/video/EasyPlayerClient.java
@@ -0,0 +1,1663 @@
+package org.easydarwin.video;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Process;
+import android.os.ResultReceiver;
+import android.os.SystemClock;
+import android.preference.PreferenceManager;
+import android.text.TextUtils;
+import android.util.Log;
+import android.view.Surface;
+import android.view.TextureView;
+
+import org.easydarwin.audio.AudioCodec;
+import org.easydarwin.audio.EasyAACMuxer;
+import org.easydarwin.sw.JNIUtil;
+import org.easydarwin.util.CodecSpecificDataUtil;
+import org.easydarwin.util.TextureLifecycler;
+
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.security.InvalidParameterException;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.PriorityQueue;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+
+import static android.media.AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar;
+import static org.easydarwin.util.CodecSpecificDataUtil.AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE;
+import static org.easydarwin.video.Client.TRANSTYPE_TCP;
+import static org.easydarwin.video.EasyMuxer2.VIDEO_TYPE_H264;
+import static org.easydarwin.video.EasyMuxer2.VIDEO_TYPE_H265;
+
+import androidx.lifecycle.Lifecycle;
+import androidx.lifecycle.LifecycleObserver;
+import androidx.lifecycle.LifecycleOwner;
+import androidx.lifecycle.OnLifecycleEvent;
+
+/**
+ * Created by John on 2016/3/17.
+ */
+public class EasyPlayerClient implements Client.SourceCallBack {
+ private static final long LEAST_FRAME_INTERVAL = 10000l;
+
+ /* 视频编码 */
+ public static final int EASY_SDK_VIDEO_CODEC_H264 = 0x1C; /* H264 */
+ public static final int EASY_SDK_VIDEO_CODEC_H265 = 0x48323635; /* H265 */
+ public static final int EASY_SDK_VIDEO_CODEC_MJPEG = 0x08; /* MJPEG */
+ public static final int EASY_SDK_VIDEO_CODEC_MPEG4 = 0x0D; /* MPEG4 */
+
+ /* 音频编码 */
+ public static final int EASY_SDK_AUDIO_CODEC_AAC = 0x15002; /* AAC */
+ public static final int EASY_SDK_AUDIO_CODEC_G711U = 0x10006; /* G711 ulaw */
+ public static final int EASY_SDK_AUDIO_CODEC_G711A = 0x10007; /* G711 alaw */
+ public static final int EASY_SDK_AUDIO_CODEC_G726 = 0x1100B; /* G726 */
+
+ /**
+ * 表示视频显示出来了
+ */
+ public static final int RESULT_VIDEO_DISPLAYED = 01;
+
+ /**
+ * 表示视频的解码方式
+ */
+ public static final String KEY_VIDEO_DECODE_TYPE = "video-decode-type";
+
+ /**
+ * 表示视频的尺寸获取到了。具体尺寸见 EXTRA_VIDEO_WIDTH、EXTRA_VIDEO_HEIGHT
+ */
+ public static final int RESULT_VIDEO_SIZE = 02;
+
+ public static final int RESULT_TIMEOUT = 03;
+
+ public static final int RESULT_EVENT = 04;
+ public static final int RESULT_UNSUPPORTED_VIDEO = 05;
+ public static final int RESULT_UNSUPPORTED_AUDIO = 06;
+ public static final int RESULT_RECORD_BEGIN = 7;
+ public static final int RESULT_RECORD_END = 8;
+
+ /**
+ * 表示第一帧数据已经收到
+ */
+ public static final int RESULT_FRAME_RECVED = 9;
+
+ private static final String TAG = EasyPlayerClient.class.getSimpleName();
+ /**
+ * 表示视频的宽度
+ */
+ public static final String EXTRA_VIDEO_WIDTH = "extra-video-width";
+ /**
+ * 表示视频的高度
+ */
+ public static final String EXTRA_VIDEO_HEIGHT = "extra-video-height";
+
+ private static final int NAL_VPS = 32;
+ private static final int NAL_SPS = 33;
+ private static final int NAL_PPS = 34;
+
+ private Surface mSurface;
+ private final TextureLifecycler lifecycler;
+ private volatile Thread mThread, mAudioThread;
+ private final ResultReceiver mRR;
+ private Client mClient;
+ private boolean mAudioEnable = true;
+ private volatile long mReceivedDataLength;
+ private AudioTrack mAudioTrack;
+ private String mRecordingPath;
+ private EasyAACMuxer mObject;
+ private EasyMuxer2 muxer2;
+ private Client.MediaInfo mMediaInfo;
+ private short mHeight = 0;
+ short mWidth = 0;
+ private ByteBuffer mCSD0;
+ private ByteBuffer mCSD1;
+ private final I420DataCallback i420callback;
+ private boolean mMuxerWaitingKeyVideo;
+
+ /**
+ * -1 表示暂停中,0表示正常录像中,1表示恢复中。
+ */
+ private int mRecordingStatus;
+ private long muxerPausedMillis = 0L;
+ private long mMuxerCuttingMillis = 0L;
+
+// private RtmpClient mRTMPClient = new RtmpClient();
+
+ public boolean isRecording() {
+ return !TextUtils.isEmpty(mRecordingPath);
+ }
+
+ private static class FrameInfoQueue extends PriorityQueue {
+ public static final int CAPACITY = 500;
+ public static final int INITIAL_CAPACITY = 300;
+
+ public FrameInfoQueue() {
+ super(INITIAL_CAPACITY, new Comparator() {
+ @Override
+ public int compare(Client.FrameInfo frameInfo, Client.FrameInfo t1) {
+ return (int) (frameInfo.stamp - t1.stamp);
+ }
+ });
+ }
+
+ final ReentrantLock lock = new ReentrantLock();
+ final Condition notFull = lock.newCondition();
+ final Condition notVideo = lock.newCondition();
+ final Condition notAudio = lock.newCondition();
+
+ @Override
+ public int size() {
+ lock.lock();
+ try {
+ return super.size();
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ @Override
+ public void clear() {
+ lock.lock();
+ try {
+ int size = super.size();
+ super.clear();
+ int k = size;
+
+ for (; k > 0 && lock.hasWaiters(notFull); k--) {
+ notFull.signal();
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ public void put(Client.FrameInfo x) throws InterruptedException {
+ lock.lockInterruptibly();
+
+ try {
+ int size;
+ while ((size = super.size()) == CAPACITY) {
+ Log.v(TAG, "queue full:" + CAPACITY);
+ notFull.await();
+ }
+
+ offer(x);
+// Log.d(TAG, String.format("queue size : " + size));
+ // 这里是乱序的。并非只有空的queue才丢到首位。因此不能做限制 if (size == 0)
+ {
+
+ if (x.audio) {
+ notAudio.signal();
+ } else {
+ notVideo.signal();
+ }
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ public Client.FrameInfo takeVideoFrame() throws InterruptedException {
+ lock.lockInterruptibly();
+
+ try {
+ while (true) {
+ Client.FrameInfo x = peek();
+
+ if (x == null) {
+ notVideo.await();
+ } else {
+ if (!x.audio) {
+ remove();
+ notFull.signal();
+ notAudio.signal();
+ return x;
+ } else {
+ notVideo.await();
+ }
+ }
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ public Client.FrameInfo takeVideoFrame(long ms) throws InterruptedException {
+ lock.lockInterruptibly();
+
+ try {
+ while (true) {
+ Client.FrameInfo x = peek();
+ if (x == null) {
+ if (!notVideo.await(ms, TimeUnit.MILLISECONDS)) return null;
+ } else {
+ if (!x.audio) {
+ remove();
+ notFull.signal();
+ notAudio.signal();
+ return x;
+ } else {
+ notVideo.await();
+ }
+ }
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ public Client.FrameInfo takeAudioFrame() throws InterruptedException {
+ lock.lockInterruptibly();
+
+ try {
+ while (true) {
+ Client.FrameInfo x = peek();
+ if (x == null) {
+ notAudio.await();
+ } else {
+ if (x.audio) {
+ remove();
+ notFull.signal();
+ notVideo.signal();
+ return x;
+ } else {
+ notAudio.await();
+ }
+ }
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+ }
+
+ private FrameInfoQueue mQueue = new FrameInfoQueue();
+
+ private final Context mContext;
+
+ /**
+ * 最新的视频时间戳
+ */
+ private volatile long mNewestStample;
+ private boolean mWaitingKeyFrame;
+ private boolean mTimeout;
+ private boolean mNotSupportedVideoCB, mNotSupportedAudioCB;
+
/**
 * Creates the player SDK object.
 *
 * @param context  the context
 * @param surface  the surface the video is rendered onto
 * @param receiver receiver for playback results and events
 */
public EasyPlayerClient(Context context, Surface surface, ResultReceiver receiver) {
    this(context, surface, receiver, null);
}
+
/**
 * Creates the player SDK object.
 *
 * @param context  the context
 * @param surface  the surface the video is rendered onto
 * @param receiver receiver for playback results and events
 * @param callback optional sink for decoded I420 frame data
 */
public EasyPlayerClient(Context context, Surface surface, ResultReceiver receiver, I420DataCallback callback) {
    mSurface = surface;
    mContext = context;
    mRR = receiver;
    i420callback = callback;
    // No TextureView involved, so no lifecycle bridging is needed.
    lifecycler = null;
}
+
/**
 * Creates the player SDK object bound to a {@link TextureView}: the decode
 * Surface is created/released in lockstep with the texture's lifecycle, and
 * when the Context is itself a LifecycleOwner, playback is paused/resumed/
 * stopped with it.
 */
public EasyPlayerClient(Context context, final TextureView view, ResultReceiver receiver, I420DataCallback callback) {
    lifecycler = new TextureLifecycler(view);
    mContext = context;
    mRR = receiver;
    i420callback = callback;

    LifecycleObserver observer1 = new LifecycleObserver() {
        @OnLifecycleEvent(value = Lifecycle.Event.ON_DESTROY)
        public void destory() {
            stop();
            // The surface may never have been created if the texture was
            // destroyed before ON_CREATE fired; guard against the NPE the
            // original unconditional release() would throw.
            if (mSurface != null) {
                mSurface.release();
                mSurface = null;
            }
        }

        @OnLifecycleEvent(value = Lifecycle.Event.ON_CREATE)
        private void create() {
            mSurface = new Surface(view.getSurfaceTexture());
        }
    };

    lifecycler.getLifecycle().addObserver(observer1);

    if (context instanceof LifecycleOwner) {
        LifecycleObserver observer = new LifecycleObserver() {
            @OnLifecycleEvent(value = Lifecycle.Event.ON_DESTROY)
            public void destory() {
                stop();
            }

            @OnLifecycleEvent(value = Lifecycle.Event.ON_PAUSE)
            private void pause() {
                EasyPlayerClient.this.pause();
            }

            @OnLifecycleEvent(value = Lifecycle.Event.ON_RESUME)
            private void resume() {
                EasyPlayerClient.this.resume();
            }
        };

        ((LifecycleOwner) context).getLifecycle().addObserver(observer);
    }
}
+
/**
 * Starts playback over TCP requesting both audio and video frames. If the
 * texture surface is not ready yet, playback is deferred until the lifecycle
 * reaches CREATED.
 *
 * NOTE(review): only usable with the TextureView constructor — {@code
 * lifecycler} is null for the Surface-based constructors and this would NPE.
 *
 * @param url the RTSP url to play
 */
public void play(final String url) {
    if (lifecycler.getLifecycle().getCurrentState().isAtLeast(Lifecycle.State.CREATED)) {
        start(url, TRANSTYPE_TCP, 0, Client.EASY_SDK_VIDEO_FRAME_FLAG | Client.EASY_SDK_AUDIO_FRAME_FLAG, "", "", null);
    } else {
        lifecycler.getLifecycle().addObserver(new LifecycleObserver() {
            @OnLifecycleEvent(value = Lifecycle.Event.ON_CREATE)
            void create() {
                start(url, TRANSTYPE_TCP, 0, Client.EASY_SDK_VIDEO_FRAME_FLAG | Client.EASY_SDK_AUDIO_FRAME_FLAG, "", "", null);
            }
        });
    }
}
+
/**
 * Starts playback without local recording.
 *
 * @param url        the RTSP url
 * @param type       transport type (TRANSTYPE_TCP / TRANSTYPE_UDP); 0 defaults to TCP
 * @param sendOption RTSP option flags passed through to the native client
 * @param mediaType  requested frame-type mask (video/audio flags)
 * @param user       user name for authentication
 * @param pwd        password for authentication
 * @return the native result code from opening the stream
 */
public int start(final String url, int type, int sendOption, int mediaType, String user, String pwd) {
    return start(url, type, sendOption, mediaType, user, pwd, null);
}
+
/**
 * Starts playback: resets playback state, spins up the video and audio
 * consumer threads, creates the native client, and opens the stream.
 *
 * @param url        the RTSP url
 * @param type       transport type (TRANSTYPE_TCP / TRANSTYPE_UDP); 0 defaults to TCP
 * @param sendOption RTSP option flags passed through to the native client
 * @param mediaType  requested frame-type mask (video/audio flags)
 * @param user       user name for authentication
 * @param pwd        password for authentication
 * @param recordPath local recording path, or null for no recording
 * @return the native result code from opening the stream
 */
public int start(final String url, int type, int sendOption, int mediaType, String user, String pwd, String recordPath) {
    if (url == null) {
        throw new NullPointerException("url is null");
    }
    if (type == 0)
        type = TRANSTYPE_TCP;
    mNewestStample = 0;
    // Whether to hold back rendering until the first key frame (user preference).
    mWaitingKeyFrame = PreferenceManager.getDefaultSharedPreferences(mContext).getBoolean("waiting_i_frame", true);
    mWidth = mHeight = 0;
    mQueue.clear();
    startCodec();
    startAudio();
    mTimeout = false;
    mNotSupportedVideoCB = mNotSupportedAudioCB = false;
    mReceivedDataLength = 0;
    mClient = new Client(mContext);
    int channel = mClient.registerCallback(this);
    mRecordingPath = recordPath;
    Log.i(TAG, String.format("playing url:\n%s\n", url));
    return mClient.openStream(channel, url, type, sendOption, mediaType, user, pwd);
}
+
/** @return whether audio playback is currently enabled. */
public boolean isAudioEnable() {
    return mAudioEnable;
}
+
/**
 * Enables or disables audio output. When an AudioTrack already exists it is
 * paused and flushed (disable) or flushed and restarted (enable), serialized
 * on the track instance itself.
 */
public void setAudioEnable(boolean enable) {
    mAudioEnable = enable;
    AudioTrack at = mAudioTrack;
    if (at != null) {
        Log.i(TAG, String.format("audio will be %s", enable ? "enabled" : "disabled"));
        synchronized (at) {
            if (!enable) {
                at.pause();
                at.flush();
            } else {
                at.flush();
                at.play();
            }
        }
    }
}
+
/** Callback receiving each decoded video frame as raw I420 data (per the interface name). */
public static interface I420DataCallback {
    public void onI420Data(ByteBuffer buffer);
}
+
+ public void pause() {
+ mQueue.clear();
+ if (mClient != null) {
+ mClient.pause();
+ }
+ mQueue.clear();
+ }
+
+ public void resume() {
+ if (mClient != null) {
+ mClient.resume();
+ }
+ }
+
/**
 * Stops playback: interrupts and joins the video and audio consumer threads,
 * stops any in-progress recording, clears the frame queue, and tears down
 * the native client.
 */
public void stop() {
    // Interrupt + join the video consumer thread.
    Thread t = mThread;
    mThread = null;
    if (t != null) {
        t.interrupt();
        try {
            t.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    // Interrupt + join the audio consumer thread.
    t = mAudioThread;
    mAudioThread = null;
    if (t != null) {
        t.interrupt();
        try {
            t.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    stopRecord();

    mQueue.clear();
    if (mClient != null) {
        mClient.unrigisterCallback(this);
        mClient.closeStream();
        try {
            mClient.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    // Second clear: frames delivered while the client was shutting down.
    mQueue.clear();
    mClient = null;
    mNewestStample = 0;
}
+
/** @return the current value of the received-data byte counter (reset on {@code start}). */
public long receivedDataLength() {
    return mReceivedDataLength;
}
+
+ /**
+ * Spawns the audio consumer thread: requests audio focus, waits until media
+ * info is available, lazily creates an AudioTrack plus a native audio
+ * decoder, then decodes queued audio frames to PCM, feeds the recorder via
+ * pumpPCMSample and writes to the track while audio is enabled.
+ */
+ private void startAudio() {
+ mAudioThread = new Thread("AUDIO_CONSUMER") {
+
+ @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
+ @Override
+ public void run() {
+ {
+ Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
+ Client.FrameInfo frameInfo;
+ long handle = 0;
+ final AudioManager am = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
+ // React to audio-focus changes: restore volume/playback on gain,
+ // pause on loss, duck to half volume on transient-can-duck.
+ AudioManager.OnAudioFocusChangeListener l = new AudioManager.OnAudioFocusChangeListener() {
+ @Override
+ public void onAudioFocusChange(int focusChange) {
+ if (focusChange == AudioManager.AUDIOFOCUS_GAIN) {
+ AudioTrack audioTrack = mAudioTrack;
+ if (audioTrack != null) {
+ audioTrack.setStereoVolume(1.0f, 1.0f);
+ if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PAUSED) {
+ audioTrack.flush();
+ audioTrack.play();
+ }
+ }
+ } else if (focusChange == AudioManager.AUDIOFOCUS_LOSS) {
+ AudioTrack audioTrack = mAudioTrack;
+ if (audioTrack != null) {
+ if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
+ audioTrack.pause();
+ }
+ }
+ } else if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK) {
+ AudioTrack audioTrack = mAudioTrack;
+ if (audioTrack != null) {
+ audioTrack.setStereoVolume(0.5f, 0.5f);
+ }
+ }
+ }
+ };
+ try {
+ int requestCode = am.requestAudioFocus(l, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN);
+ if (requestCode != AUDIOFOCUS_REQUEST_GRANTED) {
+ // no focus granted: give up on audio entirely
+ return;
+ }
+ // Discard audio frames until media info (sample rate/channels) is known.
+ do {
+ frameInfo = mQueue.takeAudioFrame();
+ if (mMediaInfo != null) break;
+ } while (true);
+ final Thread t = Thread.currentThread();
+
+ if (mAudioTrack == null) {
+ // NOTE(review): rate is scaled by 1.001 — presumably to keep the
+ // track slightly ahead of the source; confirm the rationale.
+ int sampleRateInHz = (int) (mMediaInfo.sample * 1.001);
+ int channelConfig = mMediaInfo.channel == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO;
+ int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
+ // buffer sized at 8x the minimum — TODO confirm sizing rationale
+ int bfSize = AudioTrack.getMinBufferSize(mMediaInfo.sample, channelConfig, audioFormat) * 8;
+ mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, audioFormat, bfSize, AudioTrack.MODE_STREAM);
+ }
+ mAudioTrack.play();
+ handle = AudioCodec.create(frameInfo.codec, frameInfo.sample_rate, frameInfo.channels, frameInfo.bits_per_sample);
+
+ Log.w(TAG, String.format("POST VIDEO_DISPLAYED IN AUDIO THREAD!!!"));
+ ResultReceiver rr = mRR;
+ if (rr != null) rr.send(RESULT_VIDEO_DISPLAYED, null);
+
+ // reusable decode buffer (original comment: about half a second of data)
+ byte[] mBufferReuse = new byte[16000];
+ int[] outLen = new int[1];
+ while (mAudioThread != null) {
+ if (frameInfo == null) {
+ frameInfo = mQueue.takeAudioFrame();
+ }
+ // disabled (&& false): AAC passthrough to the legacy muxer
+ if (frameInfo.codec == EASY_SDK_AUDIO_CODEC_AAC && false) {
+ pumpAACSample(frameInfo);
+ }
+ outLen[0] = mBufferReuse.length;
+ long ms = SystemClock.currentThreadTimeMillis();
+ int nRet = AudioCodec.decode(handle, frameInfo.buffer, 0, frameInfo.length, mBufferReuse, outLen);
+ if (nRet == 0) {
+// if (frameInfo.codec != EASY_SDK_AUDIO_CODEC_AAC )
+ {
+// save2path(mBufferReuse, 0, outLen[0],"/sdcard/111.pcm", true);
+ pumpPCMSample(mBufferReuse, outLen[0], frameInfo.stamp);
+ }
+ if (mAudioEnable)
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ mAudioTrack.write(mBufferReuse, 0, outLen[0], AudioTrack.WRITE_NON_BLOCKING);
+ } else {
+ mAudioTrack.write(mBufferReuse, 0, outLen[0]);
+ }
+
+ }
+ frameInfo = null;
+ }
+ } catch (Exception ex) {
+ ex.printStackTrace();
+ } finally {
+ // Always release focus, native decoder and the AudioTrack.
+ am.abandonAudioFocus(l);
+ if (handle != 0) {
+ AudioCodec.close(handle);
+ }
+ AudioTrack track = mAudioTrack;
+ if (track != null) {
+ synchronized (track) {
+ mAudioTrack = null;
+ track.release();
+ }
+ }
+ }
+ }
+ }
+ };
+
+ mAudioThread.start();
+ }
+
+ /**
+ * Debug helper: writes {@code length} bytes of {@code buffer} starting at
+ * {@code offset} to the file at {@code path}, optionally appending.
+ * Failures are printed and otherwise ignored (best effort only).
+ */
+ private static void save2path(byte[] buffer, int offset, int length, String path, boolean append) {
+ // try-with-resources guarantees the stream is closed on every path;
+ // FileNotFoundException is a subclass of IOException, so one catch suffices.
+ try (FileOutputStream fos = new FileOutputStream(path, append)) {
+ fos.write(buffer, offset, length);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Scans an H.264 Annex-B buffer for the first NAL unit whose type matches
+ * {@code type} (7 = SPS, 8 = PPS) and copies it, start code included, into
+ * {@code dataOut}.
+ *
+ * NOTE(review): the type test masks with 0x0F, not the standard 0x1F NAL
+ * type mask — confirm this is intentional for the streams handled here.
+ *
+ * @param data    bitstream to scan
+ * @param offset  index at which scanning starts
+ * @param length  number of bytes to consider (clamped to data.length)
+ * @param dataOut receives the NAL unit starting at index 0
+ * @param outLen  in: capacity available in dataOut; out: bytes written
+ * @param type    NAL unit type to search for
+ * @return index of the start of the following NAL unit on success, or a
+ *         negative code: -1 not found, -2 no terminating start code,
+ *         -3 output buffer too small
+ */
+ private static int getXPS(byte[] data, int offset, int length, byte[] dataOut, int[] outLen, int type) {
+ int i;
+ int pos0;
+ int pos1;
+ pos0 = -1;
+ length = Math.min(length, data.length);
+ for (i = offset; i < length - 4; i++) {
+ if ((0 == data[i]) && (0 == data[i + 1]) && (1 == data[i + 2]) && (type == (0x0F & data[i + 3]))) {
+ pos0 = i;
+ break;
+ }
+ }
+ if (-1 == pos0) {
+ return -1;
+ }
+ if (pos0 > 0 && data[pos0 - 1] == 0) { // include the 4-byte start code 00 00 00 01
+ pos0 = pos0 - 1;
+ }
+ pos1 = -1;
+ for (i = pos0 + 4; i < length - 4; i++) {
+ if ((0 == data[i]) && (0 == data[i + 1]) && (1 == data[i + 2])) {
+ pos1 = i;
+ break;
+ }
+ }
+ if (-1 == pos1 || pos1 == 0) {
+ return -2;
+ }
+ if (data[pos1 - 1] == 0) {
+ pos1 -= 1;
+ }
+ if (pos1 - pos0 > outLen[0]) {
+ return -3; // output buffer too small
+ }
+ // (removed a dead store to dataOut[0]: arraycopy overwrites index 0 anyway)
+ System.arraycopy(data, pos0, dataOut, 0, pos1 - pos0);
+ outLen[0] = pos1 - pos0;
+ return pos1;
+ }
+
+ /**
+ * Extracts the VPS+SPS+PPS parameter-set block from the head of an H.265
+ * Annex-B bitstream: the bytes from the start of the VPS up to the start of
+ * the NAL unit that follows the PPS.
+ *
+ * Fixes over the original implementation: the do/while retry loop spun
+ * forever when a PPS was absent, and searches starting from a -1 position
+ * (VPS/SPS not found) read data[-1] and crashed. Each NAL is now located in
+ * a single pass and null is returned as soon as any piece is missing.
+ *
+ * @return the csd-0 byte block, or null if it cannot be located
+ */
+ private static byte[] getvps_sps_pps(byte[] data, int offset, int length) {
+ length = Math.min(length, data.length);
+ int vps = findH265Nal(data, offset, length, NAL_VPS);
+ if (vps < 0) return null;
+ int sps = findH265Nal(data, vps, length, NAL_SPS);
+ if (sps < 0) return null;
+ int pps = findH265Nal(data, sps, length, NAL_PPS);
+ if (pps < 0) return null;
+ // The block ends where the NAL unit after the PPS begins.
+ int end = -1;
+ for (int i = pps + 4; i < length - 4; i++) {
+ if ((0x00 == data[i]) && (0x00 == data[i + 1]) && (0x01 == data[i + 2])) {
+ // prefer the 4-byte start code 00 00 00 01 when present
+ end = (i > 0 && data[i - 1] == 0x00) ? i - 1 : i;
+ break;
+ }
+ }
+ if (end == -1 || end < vps) {
+ return null;
+ }
+ byte[] buf = new byte[end - vps];
+ System.arraycopy(data, vps, buf, 0, buf.length);
+ return buf;
+ }
+
+ /**
+ * Returns the index of the start code (00 00 01, widened to 00 00 00 01
+ * when the preceding byte is zero) of the first H.265 NAL unit of the given
+ * type at or after {@code from}, or -1 when absent.
+ */
+ private static int findH265Nal(byte[] data, int from, int length, int nalType) {
+ for (int i = Math.max(from, 0); i < length - 4; i++) {
+ if ((0x00 == data[i]) && (0x00 == data[i + 1]) && (0x01 == data[i + 2])) {
+ // H.265 NAL header: type lives in bits 1..6 of the first byte.
+ int type = (data[i + 3] >> 1) & 0x3f;
+ if (type == nalType) {
+ return (i > 0 && data[i - 1] == 0x00) ? i - 1 : i;
+ }
+ }
+ }
+ return -1;
+ }
+
+ /** Returns true when the codec advertises support for the given MIME type. */
+ private static boolean codecMatch(String mimeType, MediaCodecInfo codecInfo) {
+ for (String supported : codecInfo.getSupportedTypes()) {
+ if (supported.equalsIgnoreCase(mimeType)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Returns the name of the first available H.264 ("video/avc") decoder, or
+ * the empty string when none exists.
+ */
+ private static String codecName() {
+ // Typed list: the original raw ArrayList made array.get(0) an Object,
+ // which is not assignable to the String return type.
+ ArrayList<String> array = new ArrayList<>();
+ int numCodecs = MediaCodecList.getCodecCount();
+ for (int i1 = 0; i1 < numCodecs; i1++) {
+ MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i1);
+ if (codecInfo.isEncoder()) {
+ continue;
+ }
+ if (codecMatch("video/avc", codecInfo)) {
+ String name = codecInfo.getName();
+ Log.d("DECODER", String.format("decoder:%s", name));
+ array.add(name);
+ }
+ }
+ if (array.isEmpty()) {
+ return "";
+ }
+ return array.get(0);
+ }
+
+ private static MediaCodecInfo selectCodec(String mimeType) {
+ int numCodecs = MediaCodecList.getCodecCount();
+ for (int i = 0; i < numCodecs; i++) {
+ MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
+
+ if (codecInfo.isEncoder()) {
+ continue;
+ }
+
+ String[] types = codecInfo.getSupportedTypes();
+ for (int j = 0; j < types.length; j++) {
+ if (types[j].equalsIgnoreCase(mimeType)) {
+ return codecInfo;
+ }
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Spawns the video consumer thread. It first tries to configure a hardware
+ * MediaCodec from the extracted csd buffers; on any failure — or when the
+ * "use-sw-codec" preference is set — it falls back to the FFmpeg-based
+ * VideoCodec.VideoDecoderLite. Decoded output is paced against frame
+ * timestamps and either rendered to mSurface or handed to i420callback.
+ */
+ private void startCodec() {
+ mThread = new Thread("VIDEO_CONSUMER") {
+
+ @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
+ @Override
+ public void run() {
+ Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
+ MediaCodec mCodec = null;
+ int mColorFormat = 0;
+ VideoCodec.VideoDecoderLite mDecoder = null, displayer = null;
+
+ try {
+ boolean pushBlankBuffersOnStop = true;
+
+ int index = 0;
+ // previous
+ long previousStampUs = 0l;
+ long lastFrameStampUs = 0l;
+ long differ = 0;
+ int realWidth = mWidth;
+ int realHeight = mHeight;
+ int sliceHeight = realHeight;
+
+ int frameWidth = 0;
+ int frameHeight = 0;
+//
+// long decodeBegin = 0;
+// long current = 0;
+
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+
+ Client.FrameInfo initFrameInfo = null;
+ Client.FrameInfo frameInfo = null;
+
+ while (mThread != null) {
+ // Lazily create a decoder: hardware MediaCodec first, soft fallback on failure.
+ if (mCodec == null && mDecoder == null) {
+ if (frameInfo == null) {
+ frameInfo = mQueue.takeVideoFrame();
+ }
+
+ initFrameInfo = frameInfo;
+
+ try {
+ if (PreferenceManager.getDefaultSharedPreferences(mContext).getBoolean("use-sw-codec", false)) {
+ throw new IllegalStateException("user set sw codec");
+ }
+
+ final String mime = frameInfo.codec == EASY_SDK_VIDEO_CODEC_H264 ? "video/avc" : "video/hevc";
+ MediaFormat format = MediaFormat.createVideoFormat(mime, mWidth, mHeight);
+ format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
+ format.setInteger(MediaFormat.KEY_PUSH_BLANK_BUFFERS_ON_STOP, pushBlankBuffersOnStop ? 1 : 0);
+ // specify the decoded frame format
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, COLOR_FormatYUV420Flexible);
+
+ if (mCSD0 != null) {
+ format.setByteBuffer("csd-0", mCSD0);
+ } else {
+ throw new InvalidParameterException("csd-0 is invalid.");
+ }
+
+ if (mCSD1 != null) {
+ format.setByteBuffer("csd-1", mCSD1);
+ } else {
+ // csd-1 (PPS) is only mandatory for H.264; H.265 packs
+ // everything into csd-0.
+ if (frameInfo.codec == EASY_SDK_VIDEO_CODEC_H264)
+ throw new InvalidParameterException("csd-1 is invalid.");
+ }
+
+ MediaCodecInfo ci = selectCodec(mime);
+ mColorFormat = CodecSpecificDataUtil.selectColorFormat(ci, mime);
+
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+ MediaCodecInfo.CodecCapabilities capabilities = ci.getCapabilitiesForType(mime);
+ MediaCodecInfo.VideoCapabilities videoCapabilities = capabilities.getVideoCapabilities();
+ boolean supported = videoCapabilities.isSizeSupported(mWidth, mHeight);
+ Log.i(TAG, "media codec " + ci.getName() + (supported ? "support" : "not support") + mWidth + "*" + mHeight);
+ if (!supported) {
+ // Some devices reject the combined size but accept each
+ // dimension individually; accept that as "supported".
+ boolean b1 = videoCapabilities.getSupportedWidths().contains(mWidth + 0);
+ boolean b2 = videoCapabilities.getSupportedHeights().contains(mHeight + 0);
+ supported |= b1 && b2;
+ if (supported) {
+ Log.w(TAG, ".......................................................................");
+ } else {
+ throw new IllegalStateException("media codec " + ci.getName() + (supported ? "support" : "not support") + mWidth + "*" + mHeight);
+ }
+ }
+ }
+ Log.i(TAG, String.format("config codec:%s", format));
+
+ MediaCodec codec = MediaCodec.createByCodecName(ci.getName());
+ // When a frame callback is set, decode to buffers (no surface).
+ codec.configure(format, i420callback != null ? null : mSurface, null, 0);
+ codec.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
+ codec.start();
+
+ mCodec = codec;
+ if (i420callback != null) {
+ final VideoCodec.VideoDecoderLite decoder = new VideoCodec.VideoDecoderLite();
+ decoder.create(mSurface, frameInfo.codec == EASY_SDK_VIDEO_CODEC_H264);
+ displayer = decoder;
+ }
+ } catch (Throwable e) {
+ // Hardware path failed: clean up and switch to the soft decoder.
+ if (mCodec != null) {
+ mCodec.release();
+ }
+ mCodec = null;
+
+ if (displayer != null) {
+ displayer.close();
+ Log.i(TAG, "AAAA 958 displayer.close()");
+ }
+
+ displayer = null;
+
+ Log.e(TAG, String.format("init codec error due to %s", e.getMessage()));
+ e.printStackTrace();
+
+ final VideoCodec.VideoDecoderLite decoder = new VideoCodec.VideoDecoderLite();
+ decoder.create(mSurface, frameInfo.codec == EASY_SDK_VIDEO_CODEC_H264);
+ mDecoder = decoder;
+ }
+
+// previewTickUs = mTexture.getTimestamp();
+// differ = previewTickUs - frameInfo.stamp;
+// index = mCodec.dequeueInputBuffer(0);
+// if (index >= 0) {
+// ByteBuffer buffer = mCodec.getInputBuffers()[index];
+// buffer.clear();
+// mCSD0.clear();
+// mCSD1.clear();
+// buffer.put(mCSD0.array(), 0, mCSD0.remaining());
+// buffer.put(mCSD1.array(), 0, mCSD1.remaining());
+// mCodec.queueInputBuffer(index, 0, buffer.position(), 0, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
+// }
+ } else {
+ frameInfo = mQueue.takeVideoFrame(5);
+ }
+
+ if (frameInfo != null) {
+ Log.d(TAG, "video " + frameInfo.stamp + " take[" + (frameInfo.stamp - lastFrameStampUs) + "]");
+ // Resolution change mid-stream: stop recording and rebuild the codec.
+ if (frameHeight != 0 && frameWidth != 0) {
+ if (frameInfo.width != 0 && frameInfo.height != 0) {
+ if (frameInfo.width != frameWidth || frameInfo.height != frameHeight) {
+ frameHeight = frameInfo.height;
+ frameWidth = frameInfo.width;
+ stopRecord();
+ if (mCodec != null) {
+ mCodec.release();
+ mCodec = null;
+ continue;
+ }
+ }
+ }
+ }
+ frameHeight = frameInfo.height;
+ frameWidth = frameInfo.width;
+ pumpVideoSample(frameInfo);
+ lastFrameStampUs = frameInfo.stamp;
+ }
+
+ do {
+ if (mDecoder != null) {
+ // ---- software decode path ----
+ if (frameInfo != null) {
+ long decodeBegin = SystemClock.elapsedRealtime();
+ int[] size = new int[2];
+
+// mDecoder.decodeFrame(frameInfo, size);
+ ByteBuffer buf = mDecoder.decodeFrameYUV(frameInfo, size);
+
+ if (i420callback != null && buf != null) {
+ i420callback.onI420Data(buf);
+ }
+
+ if (buf != null) {
+ mDecoder.releaseBuffer(buf);
+ Log.i(TAG, "AAAA 1022 releaseBuffer ");
+ }
+
+ long decodeSpend = SystemClock.elapsedRealtime() - decodeBegin;
+
+ boolean firstFrame = previousStampUs == 0l;
+ if (firstFrame) {
+ Log.i(TAG, String.format("POST VIDEO_DISPLAYED!!!"));
+ ResultReceiver rr = mRR;
+ if (rr != null) {
+ Bundle data = new Bundle();
+ data.putInt(KEY_VIDEO_DECODE_TYPE, 0);
+ rr.send(RESULT_VIDEO_DISPLAYED, data);
+ }
+ }
+
+ //Log.d(TAG, String.format("timestamp=%d diff=%d",current, current - previousStampUs ));
+
+ // Pace playback: sleep by the timestamp delta minus decode time,
+ // scaled down when frames are piling up in the cache.
+ if (previousStampUs != 0l) {
+ long sleepTime = frameInfo.stamp - previousStampUs - decodeSpend * 1000;
+ if (sleepTime > 100000) {
+ Log.w(TAG, "sleep time.too long:" + sleepTime);
+ sleepTime = 100000;
+ }
+ if (sleepTime > 0) {
+ sleepTime %= 100000;
+ long cache = mNewestStample - frameInfo.stamp;
+ sleepTime = fixSleepTime(sleepTime, cache, 50000);
+ if (sleepTime > 0) {
+ Thread.sleep(sleepTime / 1000);
+ }
+ Log.d(TAG, "cache:" + cache);
+ }
+ }
+ previousStampUs = frameInfo.stamp;
+ }
+ } else {
+ // ---- hardware MediaCodec path ----
+ try {
+ do {
+ if (frameInfo != null) {
+ byte[] pBuf = frameInfo.buffer;
+ index = mCodec.dequeueInputBuffer(10);
+ if (false)
+ throw new IllegalStateException("fake state");
+ if (index >= 0) {
+ ByteBuffer buffer = mCodec.getInputBuffers()[index];
+ buffer.clear();
+ if (pBuf.length > buffer.remaining()) {
+ // frame larger than the input buffer: submit an empty
+ // buffer to keep the codec's queue moving
+ mCodec.queueInputBuffer(index, 0, 0, frameInfo.stamp, 0);
+ } else {
+ buffer.put(pBuf, frameInfo.offset, frameInfo.length);
+ mCodec.queueInputBuffer(index, 0, buffer.position(), frameInfo.stamp + differ, 0);
+ }
+ frameInfo = null;
+ }
+ }
+ index = mCodec.dequeueOutputBuffer(info, 10); //
+ switch (index) {
+ case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
+ Log.i(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
+ break;
+ case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
+ MediaFormat mf = mCodec.getOutputFormat();
+ Log.i(TAG, "INFO_OUTPUT_FORMAT_CHANGED :" + mf);
+
+ // Prefer crop metadata over the raw width/height.
+ int width = mf.getInteger(MediaFormat.KEY_WIDTH);
+ if (mf.containsKey("crop-left") && mf.containsKey("crop-right")) {
+ width = mf.getInteger("crop-right") + 1 - mf.getInteger("crop-left");
+ }
+
+ int height = mf.getInteger(MediaFormat.KEY_HEIGHT);
+ if (mf.containsKey("crop-top") && mf.containsKey("crop-bottom")) {
+ height = mf.getInteger("crop-bottom") + 1 - mf.getInteger("crop-top");
+ }
+
+ realWidth = width;
+ realHeight = height;
+
+ if (mf.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) {
+ sliceHeight = mf.getInteger(MediaFormat.KEY_SLICE_HEIGHT);
+ } else {
+ sliceHeight = realHeight;
+ }
+ break;
+ case MediaCodec.INFO_TRY_AGAIN_LATER:
+ // no output available
+ break;
+ default:
+ // output queue is not empty
+ // -1 indicates the first frame
+ long newSleepUs = -1;
+ boolean firstTime = previousStampUs == 0l;
+ if (!firstTime) {
+ long sleepUs = (info.presentationTimeUs - previousStampUs);
+ if (sleepUs > 100000) {
+ // abnormal timestamp; the server may have dropped frames
+ Log.w(TAG, "sleep time.too long:" + sleepUs);
+ sleepUs = 100000;
+ } else if (sleepUs < 0) {
+ Log.w(TAG, "sleep time.too short:" + sleepUs);
+ sleepUs = 0;
+ }
+
+ {
+ long cache = mNewestStample - lastFrameStampUs;
+ newSleepUs = fixSleepTime(sleepUs, cache, 100000);
+ // Log.d(TAG, String.format("sleepUs:%d,newSleepUs:%d,Cache:%d", sleepUs, newSleepUs, cache));
+ }
+ }
+
+ //previousStampUs = info.presentationTimeUs;
+ ByteBuffer outputBuffer;
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+ outputBuffer = mCodec.getOutputBuffer(index);
+ } else {
+ outputBuffer = mCodec.getOutputBuffers()[index];
+ }
+
+ if (i420callback != null && outputBuffer != null) {
+ // Slice height exceeds the visible height: repack the
+ // Y, U and V planes contiguously into a temp buffer.
+ if (sliceHeight != realHeight) {
+ ByteBuffer tmp = ByteBuffer.allocateDirect(realWidth * realHeight * 3 / 2);
+ outputBuffer.clear();
+ outputBuffer.limit(realWidth * realHeight);
+ tmp.put(outputBuffer);
+
+ outputBuffer.clear();
+ outputBuffer.position(realWidth * sliceHeight);
+ outputBuffer.limit((realWidth * sliceHeight + realWidth * realHeight / 4));
+ tmp.put(outputBuffer);
+
+ outputBuffer.clear();
+ outputBuffer.position(realWidth * sliceHeight + realWidth * realHeight / 4);
+ outputBuffer.limit((realWidth * sliceHeight + realWidth * realHeight / 4 + realWidth * realHeight / 4));
+ tmp.put(outputBuffer);
+
+ tmp.clear();
+ outputBuffer = tmp;
+ }
+
+ if (mColorFormat == COLOR_FormatYUV420SemiPlanar
+ || mColorFormat == COLOR_FormatYUV420PackedSemiPlanar
+ || mColorFormat == COLOR_TI_FormatYUV420PackedSemiPlanar) {
+
+ byte[] in = new byte[realWidth * realHeight * 3 / 2];
+ outputBuffer.clear();
+ outputBuffer.get(in);
+
+ // yuvuv_to_yuv — presumably mode 4 converts semi-planar
+ // to planar I420; confirm against JNIUtil.yuvConvert.
+ JNIUtil.yuvConvert(in, realWidth, realHeight, 4);
+// // rotate by 90, 180 or 270 degrees
+// yuvRotate(in, 0, realWidth, realHeight, 90);
+
+ ByteBuffer tmp = ByteBuffer.allocateDirect(realWidth * realHeight * 3 / 2);
+ tmp.clear();
+ tmp.put(in);
+
+ i420callback.onI420Data(tmp);
+
+ // when rotating 90 or 270 degrees, width and height must be swapped
+ displayer.decoder_decodeBuffer(tmp, realWidth, realHeight);
+ }
+ }
+
+ //previewStampUs = info.presentationTimeUs;
+ if (false && Build.VERSION.SDK_INT >= 21) {
+ Log.d(TAG, String.format("releaseoutputbuffer:%d,stampUs:%d", index, previousStampUs));
+ mCodec.releaseOutputBuffer(index, previousStampUs);
+ } else {
+ if (newSleepUs < 0) {
+ newSleepUs = 0;
+ }
+// Log.d(TAG,String.format("sleep:%d", newSleepUs/1000));
+ Thread.sleep(newSleepUs / 1000);
+ // render the buffer only when no frame callback consumes it
+ mCodec.releaseOutputBuffer(index, i420callback == null);
+ }
+
+ if (firstTime) {
+ Log.i(TAG, String.format("POST VIDEO_DISPLAYED!!!"));
+ ResultReceiver rr = mRR;
+ if (rr != null) {
+ Bundle data = new Bundle();
+ data.putInt(KEY_VIDEO_DECODE_TYPE, 1);
+ rr.send(RESULT_VIDEO_DISPLAYED, data);
+ }
+ }
+ previousStampUs = info.presentationTimeUs;
+ }
+
+ }
+ while (frameInfo != null || index < MediaCodec.INFO_TRY_AGAIN_LATER);
+ } catch (IllegalStateException ex) {
+ // mediacodec error...
+
+ ex.printStackTrace();
+
+ Log.e(TAG, String.format("init codec error due to %s", ex.getMessage()));
+
+ // Hardware codec died mid-stream: replace it with the soft decoder.
+ if (mCodec != null)
+ mCodec.release();
+ mCodec = null;
+
+ if (displayer != null) {
+ displayer.close();
+ Log.i(TAG, "AAAA 1217 displayer.close()");
+ }
+ displayer = null;
+
+ final VideoCodec.VideoDecoderLite decoder = new VideoCodec.VideoDecoderLite();
+ decoder.create(mSurface, initFrameInfo.codec == EASY_SDK_VIDEO_CODEC_H264);
+ mDecoder = decoder;
+ continue;
+ }
+
+ }
+ break;
+ } while (true);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ // Release whichever decoders were created.
+ if (mCodec != null) {
+// mCodec.stop();
+ mCodec.release();
+ }
+
+ if (mDecoder != null) {
+ mDecoder.close();
+ Log.i(TAG, "AAAA 1238 mDecoder.close();");
+ }
+
+ if (displayer != null) {
+ displayer.close();
+ Log.i(TAG, "AAAA 1243 displayer.close();");
+ }
+ }
+ }
+ };
+
+ mThread.start();
+ }
+
+ /**
+ * Exponentially rescales a sleep interval based on the amount of buffered
+ * data: sleeps shrink when more than {@code delayUs} of frames is cached
+ * (catch up) and grow when less is cached (let the cache build).
+ */
+ private static final long fixSleepTime(long sleepTimeUs, long totalTimestampDifferUs, long delayUs) {
+ if (totalTimestampDifferUs < 0l) {
+ Log.w(TAG, String.format("totalTimestampDifferUs is:%d, this should not be happen.", totalTimestampDifferUs));
+ totalTimestampDifferUs = 0;
+ }
+
+ final double exponent = ((double) (delayUs - totalTimestampDifferUs)) / 1000000d;
+ final double scaled = sleepTimeUs * Math.exp(exponent) + 0.5f;
+ Log.i(TAG, String.format("%d,%d,%d->%d", sleepTimeUs, totalTimestampDifferUs, delayUs, (int) scaled));
+ return (long) scaled;
+ }
+
+ /**
+ * Starts recording the stream to {@code path} with EasyMuxer2. Requires
+ * media info, the video dimensions and csd-0 to be known; csd-1 is optional
+ * (H.265 carries VPS/SPS/PPS inside csd-0). Actual writing begins at the
+ * next video key frame (mMuxerWaitingKeyVideo gate).
+ */
+ @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+ public synchronized void startRecord(String path) {
+ if (mMediaInfo == null || mWidth == 0 || mHeight == 0 || mCSD0 == null)
+ return;
+
+ mRecordingPath = path;
+ EasyMuxer2 muxer2 = new EasyMuxer2();
+ mMuxerCuttingMillis = 0l;
+ mRecordingStatus = 0;
+ muxerPausedMillis = 0;
+ ByteBuffer csd1 = this.mCSD1;
+ if (csd1 == null) csd1 = ByteBuffer.allocate(0);
+ // Concatenate csd-0 and csd-1 into one extradata blob for the muxer.
+ byte[] extra = new byte[mCSD0.capacity() + csd1.capacity()];
+ mCSD0.clear();
+ csd1.clear();
+ mCSD0.get(extra, 0, mCSD0.capacity());
+ csd1.get(extra, mCSD0.capacity(), csd1.capacity());
+
+ int r = muxer2.create(path, mMediaInfo.videoCodec == EASY_SDK_VIDEO_CODEC_H265 ? VIDEO_TYPE_H265 : VIDEO_TYPE_H264, mWidth, mHeight, extra, mMediaInfo.sample, mMediaInfo.channel);
+ if (r != 0) {
+ Log.w(TAG, "create muxer2:" + r);
+ return;
+ }
+
+ // Hold frames until the first key frame so the file starts decodable.
+ mMuxerWaitingKeyVideo = true;
+ this.muxer2 = muxer2;
+
+ ResultReceiver rr = mRR;
+ if (rr != null) {
+ rr.send(RESULT_RECORD_BEGIN, null);
+ }
+ }
+
+ /** Marks recording as paused and remembers when the pause began. */
+ public synchronized void pauseRecord() {
+ if (mRecordingStatus == -1) {
+ return;
+ }
+ mRecordingStatus = -1;
+ muxerPausedMillis = SystemClock.elapsedRealtime();
+ }
+
+ /** Resumes a paused recording; writing restarts at the next key frame. */
+ public synchronized void resumeRecord() {
+ if (mRecordingStatus != -1) {
+ return;
+ }
+ mMuxerWaitingKeyVideo = true;
+ mRecordingStatus = 1;
+ }
+
+ private static int getSampleIndex(int sample) {
+ for (int i = 0; i < AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE.length; i++) {
+ if (sample == AUDIO_SPECIFIC_CONFIG_SAMPLING_RATE_TABLE[i]) {
+ return i;
+ }
+ }
+ return -1;
+ }
+
+ /**
+ * Feeds one AAC frame to the legacy EasyMuxer recorder, skipping the 7-byte
+ * ADTS header so the muxer receives raw AAC payload.
+ *
+ * NOTE(review): the IllegalArgumentExceptions thrown for wrong frame types
+ * are NOT caught by the IllegalStateException handler below, so they
+ * propagate to the caller — confirm this fail-fast behavior is intended.
+ */
+ private void pumpAACSample(Client.FrameInfo frameInfo) {
+ EasyMuxer muxer = mObject;
+ if (muxer == null) return;
+ MediaCodec.BufferInfo bi = new MediaCodec.BufferInfo();
+ bi.offset = frameInfo.offset;
+ bi.size = frameInfo.length;
+ ByteBuffer buffer = ByteBuffer.wrap(frameInfo.buffer, bi.offset, bi.size);
+ bi.presentationTimeUs = frameInfo.stamp;
+
+ try {
+ if (!frameInfo.audio) {
+ throw new IllegalArgumentException("frame should be audio!");
+ }
+ if (frameInfo.codec != EASY_SDK_AUDIO_CODEC_AAC) {
+ throw new IllegalArgumentException("audio codec should be aac!");
+ }
+ // skip the 7-byte ADTS header (assumes no CRC field — TODO confirm)
+ bi.offset += 7;
+ bi.size -= 7;
+ muxer.pumpStream(buffer, bi, false);
+ } catch (IllegalStateException ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ private synchronized void pumpPCMSample(byte[] pcm, int length, long stampUS) {
+ EasyMuxer2 muxer2 = this.muxer2;
+ if (muxer2 == null)
+ return;
+
+ if (mRecordingStatus < 0)
+ return;
+
+ if (mMuxerWaitingKeyVideo) {
+ Log.i(TAG, "writeFrame ignore due to no key frame!");
+ return;
+ }
+
+ long timeStampMillis = stampUS / 1000;
+ timeStampMillis -= mMuxerCuttingMillis;
+ timeStampMillis = Math.max(0, timeStampMillis);
+ int r = muxer2.writeFrame(EasyMuxer2.AVMEDIA_TYPE_AUDIO, pcm, 0, length, timeStampMillis);
+ Log.i(TAG, "writeFrame audio ret:" + r);
+ }
+
+
+ /**
+ * Feeds one compressed video frame to the active recorder. Writing starts
+ * at a key frame (type == 1): while waiting, frames are dropped; when the
+ * key frame arrives after a resume, the paused interval is folded out of
+ * the record timeline.
+ */
+ private synchronized void pumpVideoSample(Client.FrameInfo frameInfo) {
+ EasyMuxer2 muxer2 = this.muxer2;
+ if (muxer2 == null) return;
+ if (mRecordingStatus < 0) return;
+ if (mMuxerWaitingKeyVideo) {
+ if (frameInfo.type == 1) {
+ mMuxerWaitingKeyVideo = false;
+ if (mRecordingStatus == 1) {
+ // resumed: subtract the paused duration from the timeline
+ mMuxerCuttingMillis += SystemClock.elapsedRealtime() - muxerPausedMillis;
+ mRecordingStatus = 0;
+ }
+ }
+ }
+ if (mMuxerWaitingKeyVideo) {
+ Log.i(TAG, "writeFrame ignore due to no key frame!");
+ return;
+ }
+ // (removed an empty `if (frameInfo.type == 1)` block that contained only
+ // commented-out offset mangling)
+ long timeStampMillis = frameInfo.stamp / 1000;
+ timeStampMillis -= mMuxerCuttingMillis;
+ timeStampMillis = Math.max(0, timeStampMillis);
+ int r = muxer2.writeFrame(EasyMuxer2.AVMEDIA_TYPE_VIDEO, frameInfo.buffer, frameInfo.offset, frameInfo.length, timeStampMillis);
+ Log.i(TAG, "writeFrame video ret:" + r);
+ }
+
+
+ /** Stops and finalizes any active recording, then notifies RESULT_RECORD_END. */
+ public synchronized void stopRecord() {
+ mRecordingPath = null;
+ mMuxerCuttingMillis = 0l;
+ mRecordingStatus = 0;
+ muxerPausedMillis = 0;
+
+ final EasyMuxer2 recorder = this.muxer2;
+ if (recorder == null) {
+ return;
+ }
+ this.muxer2 = null;
+ recorder.close();
+ mObject = null;
+
+ final ResultReceiver rr = mRR;
+ if (rr != null) {
+ rr.send(RESULT_RECORD_END, null);
+ }
+ }
+
+ /**
+ * Client callback entry point; delegates to onRTSPSourceCallBack1 and keeps
+ * any throwable from propagating back into the native callback thread.
+ */
+ @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
+ @Override
+ public void onSourceCallBack(int _channelId, int _channelPtr, int _frameType, Client.FrameInfo frameInfo) {
+ try {
+ onRTSPSourceCallBack1(_channelId, _channelPtr, _frameType, frameInfo);
+ } catch (Throwable e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Dispatches one callback from the native client: video frames are
+ * validated (H.264/H.265 only), normalized (a duplicated leading start code
+ * is stripped), used to extract csd buffers on the first key frame, and
+ * queued; audio frames are validated and queued; timeouts and events are
+ * forwarded to the ResultReceiver.
+ */
+ public void onRTSPSourceCallBack1(int _channelId, int _channelPtr, int _frameType, Client.FrameInfo frameInfo) {
+ Thread.currentThread().setName("PRODUCER_THREAD");
+ if (frameInfo != null) {
+ mReceivedDataLength += frameInfo.length;
+ }
+ if (_frameType == Client.EASY_SDK_VIDEO_FRAME_FLAG) {
+ if (frameInfo.codec != EASY_SDK_VIDEO_CODEC_H264 && frameInfo.codec != EASY_SDK_VIDEO_CODEC_H265) {
+ ResultReceiver rr = mRR;
+ if (!mNotSupportedVideoCB && rr != null) {
+ mNotSupportedVideoCB = true;
+ rr.send(RESULT_UNSUPPORTED_VIDEO, null);
+ }
+ return;
+ }
+ if (frameInfo.width == 0 || frameInfo.height == 0) {
+ return;
+ }
+
+ // Some sources prepend a duplicated 00 00 00 01 start code; drop the first one.
+ if (frameInfo.length >= 8) {
+ if (frameInfo.buffer[0] == 0 && frameInfo.buffer[1] == 0 && frameInfo.buffer[2] == 0 && frameInfo.buffer[3] == 1) {
+ if (frameInfo.buffer[4] == 0 && frameInfo.buffer[5] == 0 && frameInfo.buffer[6] == 0 && frameInfo.buffer[7] == 1) {
+ frameInfo.offset += 4;
+ frameInfo.length -= 4;
+ }
+ }
+ }
+
+ if (frameInfo.type == 1) {
+ Log.i(TAG, String.format("recv I frame"));
+ }
+
+ mNewestStample = frameInfo.stamp;
+ frameInfo.audio = false;
+ if (mWaitingKeyFrame) {
+ ResultReceiver rr = mRR;
+ Bundle bundle = new Bundle();
+ bundle.putInt(EXTRA_VIDEO_WIDTH, frameInfo.width);
+ bundle.putInt(EXTRA_VIDEO_HEIGHT, frameInfo.height);
+ mWidth = frameInfo.width;
+ mHeight = frameInfo.height;
+ Log.i(TAG, String.format("RESULT_VIDEO_SIZE:%d*%d", frameInfo.width, frameInfo.height));
+ if (rr != null) rr.send(RESULT_VIDEO_SIZE, bundle);
+
+ Log.i(TAG, String.format("width:%d,height:%d", mWidth, mHeight));
+
+ if (frameInfo.codec == EASY_SDK_VIDEO_CODEC_H264) {
+ // Extract SPS (type 7) and PPS (type 8) for MediaCodec csd-0/csd-1.
+ byte[] dataOut = new byte[128];
+ int[] outLen = new int[]{128};
+ int result = getXPS(frameInfo.buffer, 0, 256, dataOut, outLen, 7);
+ if (result >= 0) {
+ ByteBuffer csd0 = ByteBuffer.allocate(outLen[0]);
+ csd0.put(dataOut, 0, outLen[0]);
+ csd0.clear();
+ mCSD0 = csd0;
+ Log.i(TAG, String.format("CSD-0 searched"));
+ }
+ outLen[0] = 128;
+ result = getXPS(frameInfo.buffer, 0, 256, dataOut, outLen, 8);
+ if (result >= 0) {
+ ByteBuffer csd1 = ByteBuffer.allocate(outLen[0]);
+ csd1.put(dataOut, 0, outLen[0]);
+ csd1.clear();
+ mCSD1 = csd1;
+ Log.i(TAG, String.format("CSD-1 searched"));
+ }
+ } else {
+ // H.265: a single VPS/SPS/PPS block becomes csd-0.
+ byte[] spsPps = getvps_sps_pps(frameInfo.buffer, 0, 256);
+ if (spsPps != null) {
+ mCSD0 = ByteBuffer.wrap(spsPps);
+ }
+ }
+
+ if (frameInfo.type != 1) {
+ Log.w(TAG, String.format("discard p frame."));
+ return;
+ }
+ mWaitingKeyFrame = false;
+ synchronized (this) {
+ if (!TextUtils.isEmpty(mRecordingPath) && mObject == null) {
+ startRecord(mRecordingPath);
+ }
+ }
+ } else {
+ int width = frameInfo.width;
+ int height = frameInfo.height;
+ if (width != 0 && height != 0)
+ if (width != mWidth || height != mHeight) {
+ // resolution change...
+ ResultReceiver rr = mRR;
+ Bundle bundle = new Bundle();
+ bundle.putInt(EXTRA_VIDEO_WIDTH, frameInfo.width);
+ bundle.putInt(EXTRA_VIDEO_HEIGHT, frameInfo.height);
+ mWidth = frameInfo.width;
+ mHeight = frameInfo.height;
+ Log.i(TAG, String.format("RESULT_VIDEO_SIZE:%d*%d", frameInfo.width, frameInfo.height));
+ if (rr != null) rr.send(RESULT_VIDEO_SIZE, bundle);
+ }
+ }
+ try {
+ mQueue.put(frameInfo);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ } else if (_frameType == Client.EASY_SDK_AUDIO_FRAME_FLAG) {
+ mNewestStample = frameInfo.stamp;
+ frameInfo.audio = true;
+ if (frameInfo.codec != EASY_SDK_AUDIO_CODEC_AAC &&
+ frameInfo.codec != EASY_SDK_AUDIO_CODEC_G711A &&
+ frameInfo.codec != EASY_SDK_AUDIO_CODEC_G711U &&
+ frameInfo.codec != EASY_SDK_AUDIO_CODEC_G726) {
+ ResultReceiver rr = mRR;
+ if (!mNotSupportedAudioCB && rr != null) {
+ mNotSupportedAudioCB = true;
+ rr.send(RESULT_UNSUPPORTED_AUDIO, null);
+ }
+ return;
+ }
+ Log.d(TAG, String.format("queue size :%d", mQueue.size()));
+ try {
+ mQueue.put(frameInfo);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ } else if (_frameType == 0) {
+ // timeout: report once per session
+ if (!mTimeout) {
+ mTimeout = true;
+ ResultReceiver rr = mRR;
+ if (rr != null) rr.send(RESULT_TIMEOUT, null);
+ }
+ } else if (_frameType == Client.EASY_SDK_EVENT_FRAME_FLAG) {
+ ResultReceiver rr = mRR;
+ Bundle resultData = new Bundle();
+ resultData.putString("event-msg", new String(frameInfo.buffer));
+ // BUGFIX: the populated bundle was built but null was sent before.
+ if (rr != null) rr.send(RESULT_EVENT, resultData);
+ }
+ }
+
+ /** Caches the negotiated media info (codecs, sample rate, channels) from the client. */
+ @Override
+ public void onMediaInfoCallBack(int _channelId, Client.MediaInfo mi) {
+ mMediaInfo = mi;
+ Log.i(TAG, String.format("MediaInfo fetchd\n%s", mi));
+ }
+
+ /**
+ * Forwards client state events (1 = connecting, 2 = error, 3 = thread exit)
+ * to the ResultReceiver as RESULT_EVENT bundles carrying a localized
+ * "event-msg" string and, for errors, an "errorcode" extra.
+ */
+ @Override
+ public void onEvent(int channel, int err, int info) {
+ ResultReceiver rr = mRR;
+ Bundle resultData = new Bundle();
+ /*
+ int state = 0;
+ int err = EasyRTSP_GetErrCode(fRTSPHandle);
+ // EasyRTSPClient starts connecting; its connection thread is being created
+ if (NULL == _pBuf && NULL == _frameInfo)
+ {
+ LOGD("Recv Event: Connecting...");
+ state = 1;
+ }
+
+ // RTSPClient connection error; the code comes from EasyRTSP_GetErrCode(), e.g. 404
+ else if (NULL != _frameInfo && _frameInfo->codec == EASY_SDK_EVENT_CODEC_ERROR)
+ {
+ LOGD("Recv Event: Error:%d ...\n", err);
+ state = 2;
+ }
+
+ // The connection thread exited; callers should stop related calls and reset UI state
+ else if (NULL != _frameInfo && _frameInfo->codec == EASY_SDK_EVENT_CODEC_EXIT)
+ {
+ LOGD("Recv Event: Exit,Error:%d ...", err);
+ state = 3;
+ }
+
+ * */
+ switch (info) {
+ case 1:
+ resultData.putString("event-msg", "连接中...");
+ break;
+ case 2:
+ resultData.putInt("errorcode", err);
+ resultData.putString("event-msg", String.format("错误:%d", err));
+ break;
+ case 3:
+ resultData.putInt("errorcode", err);
+ resultData.putString("event-msg", String.format("线程退出。%d", err));
+ break;
+ }
+ if (rr != null) rr.send(RESULT_EVENT, resultData);
+ }
+
+ /**
+ * 旋转YUV格式数据
+ *
+ * @param src YUV数据
+ * @param format 0,420P;1,420SP
+ * @param width 宽度
+ * @param height 高度
+ * @param degree 旋转度数
+ */
+ private static void yuvRotate(byte[] src, int format, int width, int height, int degree) {
+ int offset = 0;
+ if (format == 0) {
+ JNIUtil.rotateMatrix(src, offset, width, height, degree);
+ offset += (width * height);
+ JNIUtil.rotateMatrix(src, offset, width / 2, height / 2, degree);
+ offset += width * height / 4;
+ JNIUtil.rotateMatrix(src, offset, width / 2, height / 2, degree);
+ } else if (format == 1) {
+ JNIUtil.rotateMatrix(src, offset, width, height, degree);
+ offset += width * height;
+ JNIUtil.rotateShortMatrix(src, offset, width / 2, height / 2, degree);
+ }
+ }
+}
\ No newline at end of file
diff --git a/library-rtsp/src/main/java/org/easydarwin/video/VideoCodec.java b/library-rtsp/src/main/java/org/easydarwin/video/VideoCodec.java
new file mode 100644
index 0000000..8061b10
--- /dev/null
+++ b/library-rtsp/src/main/java/org/easydarwin/video/VideoCodec.java
@@ -0,0 +1,130 @@
+package org.easydarwin.video;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Thin JNI wrapper around the native ffmpeg-based video decoder libraries
+ * (libproffmpeg / libVideoCodecer). Created by John on 2017/1/5.
+ */
+public class VideoCodec {
+
+    static {
+        // Load the decoder and its ffmpeg dependency once per process.
+        System.loadLibrary("proffmpeg");
+        System.loadLibrary("VideoCodecer");
+    }
+
+    /** Codec id understood by the native layer: H.264/AVC. */
+    public static final int DECODER_H264 = 0;
+    /** Codec id understood by the native layer: H.265/HEVC. */
+    public static final int DECODER_H265 = 1;
+
+    /** Opaque native decoder handle; 0 means not created or already closed. */
+    protected long mHandle;
+
+    private native long create(Object surface, int codec);
+
+    private native void close(long handle);
+
+    private native int decode(long handle, byte[] in, int offset, int length, int[] size);
+
+    private native ByteBuffer decodeYUV(long handle, byte[] in, int offset, int length, int[] size);
+
+    private native void releaseYUV(ByteBuffer buffer);
+
+    private native void decodeYUV2(long handle, ByteBuffer buffer, int width, int height);
+
+    /**
+     * Creates the native decoder.
+     *
+     * @param surface render target, or null for YUV-only decoding
+     * @param codec   {@link #DECODER_H264} or {@link #DECODER_H265}
+     * @return 0 on success, -1 if the native decoder could not be created
+     */
+    public int decoder_create(Object surface, int codec) {
+        mHandle = create(surface, codec);
+        return mHandle != 0 ? 0 : -1;
+    }
+
+    /**
+     * Decodes one encoded frame.
+     *
+     * @throws IllegalStateException if the decoder was never created or is closed;
+     *         previously a zero handle was passed to native code, crashing the VM.
+     */
+    public int decoder_decode(byte[] in, int offset, int length, int[] size) {
+        checkCreated();
+        return decode(mHandle, in, offset, length, size);
+    }
+
+    /**
+     * Decodes one encoded frame and returns its YUV data as a native buffer.
+     * The buffer must be returned via {@link #decoder_releaseBuffer(ByteBuffer)}.
+     */
+    public ByteBuffer decoder_decodeYUV(byte[] in, int offset, int length, int[] size) {
+        checkCreated();
+        return decodeYUV(mHandle, in, offset, length, size);
+    }
+
+    /** Releases a buffer obtained from {@link #decoder_decodeYUV}. */
+    public void decoder_releaseBuffer(ByteBuffer buffer) {
+        releaseYUV(buffer);
+    }
+
+    /** Pushes a raw YUV buffer of the given dimensions through the native decoder. */
+    public void decoder_decodeBuffer(ByteBuffer buffer, int width, int height) {
+        checkCreated();
+        decodeYUV2(mHandle, buffer, width, height);
+    }
+
+    /** Closes the native decoder; safe to call more than once. */
+    public void decoder_close() {
+        if (mHandle == 0) {
+            return;
+        }
+        close(mHandle);
+        mHandle = 0;
+    }
+
+    /** Guards native calls against a missing handle (never created, or closed). */
+    private void checkCreated() {
+        if (mHandle == 0) {
+            throw new IllegalStateException("decoder not created or already closed");
+        }
+    }
+
+    /** Convenience wrapper used for software decoding of Client frames. */
+    public static class VideoDecoderLite extends VideoCodec {
+
+        private int[] mSize;
+        private Object surface;
+
+        /**
+         * Creates the decoder.
+         *
+         * @param surface render target, may be null for YUV-only decoding
+         * @param h264    true for H.264 input, false for H.265
+         */
+        public void create(Object surface, boolean h264) {
+            this.surface = surface;
+            decoder_create(surface, h264 ? DECODER_H264 : DECODER_H265);
+            mSize = new int[2];
+        }
+
+        public void close() {
+            decoder_close();
+        }
+
+        protected int decodeFrame(Client.FrameInfo aFrame, int[] size) {
+            return decoder_decode(aFrame.buffer, aFrame.offset, aFrame.length, size);
+        }
+
+        protected ByteBuffer decodeFrameYUV(Client.FrameInfo aFrame, int[] size) {
+            return decoder_decodeYUV(aFrame.buffer, aFrame.offset, aFrame.length, size);
+        }
+
+        protected void releaseBuffer(ByteBuffer buffer) {
+            decoder_releaseBuffer(buffer);
+        }
+    }
+}
diff --git a/library-rtsp/src/main/jniLibs/arm64-v8a/libAudioCodecer.so b/library-rtsp/src/main/jniLibs/arm64-v8a/libAudioCodecer.so
new file mode 100644
index 0000000..d0cc23b
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/arm64-v8a/libAudioCodecer.so differ
diff --git a/library-rtsp/src/main/jniLibs/arm64-v8a/libEasyRTSPClient.so b/library-rtsp/src/main/jniLibs/arm64-v8a/libEasyRTSPClient.so
new file mode 100644
index 0000000..4ac1deb
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/arm64-v8a/libEasyRTSPClient.so differ
diff --git a/library-rtsp/src/main/jniLibs/arm64-v8a/libVideoCodecer.so b/library-rtsp/src/main/jniLibs/arm64-v8a/libVideoCodecer.so
new file mode 100644
index 0000000..8fe2e56
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/arm64-v8a/libVideoCodecer.so differ
diff --git a/library-rtsp/src/main/jniLibs/arm64-v8a/libproffmpeg.so b/library-rtsp/src/main/jniLibs/arm64-v8a/libproffmpeg.so
new file mode 100644
index 0000000..11d1c0d
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/arm64-v8a/libproffmpeg.so differ
diff --git a/library-rtsp/src/main/jniLibs/arm64-v8a/libyuv_android.so b/library-rtsp/src/main/jniLibs/arm64-v8a/libyuv_android.so
new file mode 100644
index 0000000..5277dcd
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/arm64-v8a/libyuv_android.so differ
diff --git a/library-rtsp/src/main/jniLibs/armeabi-v7a/libAudioCodecer.so b/library-rtsp/src/main/jniLibs/armeabi-v7a/libAudioCodecer.so
new file mode 100644
index 0000000..9135cfd
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/armeabi-v7a/libAudioCodecer.so differ
diff --git a/library-rtsp/src/main/jniLibs/armeabi-v7a/libEasyRTSPClient.so b/library-rtsp/src/main/jniLibs/armeabi-v7a/libEasyRTSPClient.so
new file mode 100644
index 0000000..fba13b2
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/armeabi-v7a/libEasyRTSPClient.so differ
diff --git a/library-rtsp/src/main/jniLibs/armeabi-v7a/libVideoCodecer.so b/library-rtsp/src/main/jniLibs/armeabi-v7a/libVideoCodecer.so
new file mode 100644
index 0000000..764e05a
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/armeabi-v7a/libVideoCodecer.so differ
diff --git a/library-rtsp/src/main/jniLibs/armeabi-v7a/libproffmpeg.so b/library-rtsp/src/main/jniLibs/armeabi-v7a/libproffmpeg.so
new file mode 100644
index 0000000..05b5f40
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/armeabi-v7a/libproffmpeg.so differ
diff --git a/library-rtsp/src/main/jniLibs/armeabi-v7a/libyuv_android.so b/library-rtsp/src/main/jniLibs/armeabi-v7a/libyuv_android.so
new file mode 100644
index 0000000..73d36ee
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/armeabi-v7a/libyuv_android.so differ
diff --git a/library-rtsp/src/main/jniLibs/x86/libAudioCodecer.so b/library-rtsp/src/main/jniLibs/x86/libAudioCodecer.so
new file mode 100644
index 0000000..2de748a
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/x86/libAudioCodecer.so differ
diff --git a/library-rtsp/src/main/jniLibs/x86/libEasyRTSPClient.so b/library-rtsp/src/main/jniLibs/x86/libEasyRTSPClient.so
new file mode 100644
index 0000000..7d1f042
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/x86/libEasyRTSPClient.so differ
diff --git a/library-rtsp/src/main/jniLibs/x86/libVideoCodecer.so b/library-rtsp/src/main/jniLibs/x86/libVideoCodecer.so
new file mode 100644
index 0000000..9436b50
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/x86/libVideoCodecer.so differ
diff --git a/library-rtsp/src/main/jniLibs/x86/libproffmpeg.so b/library-rtsp/src/main/jniLibs/x86/libproffmpeg.so
new file mode 100644
index 0000000..ec98fa6
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/x86/libproffmpeg.so differ
diff --git a/library-rtsp/src/main/jniLibs/x86/libyuv_android.so b/library-rtsp/src/main/jniLibs/x86/libyuv_android.so
new file mode 100644
index 0000000..b3978e1
Binary files /dev/null and b/library-rtsp/src/main/jniLibs/x86/libyuv_android.so differ
diff --git a/library-rtsp/src/main/res/values/strings.xml b/library-rtsp/src/main/res/values/strings.xml
new file mode 100644
index 0000000..321e479
--- /dev/null
+++ b/library-rtsp/src/main/res/values/strings.xml
@@ -0,0 +1,3 @@
+<resources>
+    <string name="app_name">RTSPClient</string>
+</resources>
diff --git a/settings.gradle b/settings.gradle
index 43a1ef1..04997c6 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -3,4 +3,4 @@ include ':library-push'
include ':library-ijkplayer'
include ':library-serialPort'
include ':library-common'
-include ':library-vlc'
+include ':library-rtsp'