Mirror of https://github.com/oxen-io/session-android.git
Custom streaming video muxer.

video/build.gradle (new file, 36 lines)
@@ -0,0 +1,36 @@
apply plugin: 'com.android.library'
apply plugin: 'witness'
apply from: 'witness-verifications.gradle'

android {
    buildToolsVersion BUILD_TOOL_VERSION
    compileSdkVersion COMPILE_SDK

    defaultConfig {
        minSdkVersion MINIMUM_SDK
        targetSdkVersion TARGET_SDK
    }

    compileOptions {
        sourceCompatibility JAVA_VERSION
        targetCompatibility JAVA_VERSION
    }
}

dependencyVerification {
    configuration = '(debug|release)RuntimeClasspath'
}

dependencies {
    lintChecks project(':lintchecks')

    api 'androidx.annotation:annotation:1.1.0'

    implementation project(':core-util')

    implementation 'org.mp4parser:isoparser:1.9.39'
    implementation 'org.mp4parser:streaming:1.9.39'
    implementation('org.mp4parser:muxer:1.9.39') {
        exclude group: 'junit', module: 'junit'
    }
}

video/src/main/AndroidManifest.xml (new file, 2 lines)
@@ -0,0 +1,2 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest package="org.signal.video" />

AndroidMuxer.java (new file)
@@ -0,0 +1,52 @@
package org.thoughtcrime.securesms.video.videoconverter;

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;

import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;

import java.io.File;
import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;

final class AndroidMuxer implements Muxer {

  private final MediaMuxer muxer;

  AndroidMuxer(final @NonNull File file) throws IOException {
    muxer = new MediaMuxer(file.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
  }

  @RequiresApi(26)
  AndroidMuxer(final @NonNull FileDescriptor fileDescriptor) throws IOException {
    muxer = new MediaMuxer(fileDescriptor, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
  }

  @Override
  public void start() {
    muxer.start();
  }

  @Override
  public void stop() {
    muxer.stop();
  }

  @Override
  public int addTrack(final @NonNull MediaFormat format) {
    return muxer.addTrack(format);
  }

  @Override
  public void writeSampleData(final int trackIndex, final @NonNull ByteBuffer byteBuf, final @NonNull MediaCodec.BufferInfo bufferInfo) {
    muxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
  }

  @Override
  public void release() {
    muxer.release();
  }
}

EncodingException.java (new file)
@@ -0,0 +1,11 @@
package org.thoughtcrime.securesms.video.videoconverter;

public final class EncodingException extends Exception {
  EncodingException(String message) {
    super(message);
  }

  EncodingException(String message, Exception inner) {
    super(message, inner);
  }
}

InputSurface.java (new file)
@@ -0,0 +1,187 @@
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * This file has been modified by Signal.
 */

package org.thoughtcrime.securesms.video.videoconverter;

import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.view.Surface;

import org.signal.core.util.logging.Log;

/**
 * Holds state associated with a Surface used for MediaCodec encoder input.
 * <p>
 * The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
 * to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent
 * to the video encoder.
 */
final class InputSurface {
  private static final String TAG = "InputSurface";
  private static final boolean VERBOSE = false;

  private static final int EGL_RECORDABLE_ANDROID = 0x3142;
  private static final int EGL_OPENGL_ES2_BIT = 4;

  private EGLDisplay mEGLDisplay;
  private EGLContext mEGLContext;
  private EGLSurface mEGLSurface;

  private Surface mSurface;

  /**
   * Creates an InputSurface from a Surface.
   */
  InputSurface(Surface surface) throws TranscodingException {
    if (surface == null) {
      throw new NullPointerException();
    }
    mSurface = surface;

    eglSetup();
  }

  /**
   * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
   */
  private void eglSetup() throws TranscodingException {
    mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
    if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
      throw new TranscodingException("unable to get EGL14 display");
    }
    int[] version = new int[2];
    if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
      mEGLDisplay = null;
      throw new TranscodingException("unable to initialize EGL14");
    }

    // Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
    // to be able to tell if the frame is reasonable.
    int[] attribList = {
        EGL14.EGL_RED_SIZE, 8,
        EGL14.EGL_GREEN_SIZE, 8,
        EGL14.EGL_BLUE_SIZE, 8,
        EGL14.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
        EGL_RECORDABLE_ANDROID, 1,
        EGL14.EGL_NONE
    };
    EGLConfig[] configs = new EGLConfig[1];
    int[] numConfigs = new int[1];
    if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
        numConfigs, 0)) {
      throw new TranscodingException("unable to find RGB888+recordable ES2 EGL config");
    }

    // Configure context for OpenGL ES 2.0.
    int[] attrib_list = {
        EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
        EGL14.EGL_NONE
    };
    mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
        attrib_list, 0);
    checkEglError("eglCreateContext");
    if (mEGLContext == null) {
      throw new TranscodingException("null context");
    }

    // Create a window surface, and attach it to the Surface we received.
    int[] surfaceAttribs = {
        EGL14.EGL_NONE
    };
    mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
        surfaceAttribs, 0);
    checkEglError("eglCreateWindowSurface");
    if (mEGLSurface == null) {
      throw new TranscodingException("surface was null");
    }
  }

  /**
   * Discard all resources held by this class, notably the EGL context. Also releases the
   * Surface that was passed to our constructor.
   */
  public void release() {
    if (EGL14.eglGetCurrentContext().equals(mEGLContext)) {
      // Clear the current context and surface to ensure they are discarded immediately.
      EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
          EGL14.EGL_NO_CONTEXT);
    }
    EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
    EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
    //EGL14.eglTerminate(mEGLDisplay);

    mSurface.release();

    // null everything out so future attempts to use this object will cause an NPE
    mEGLDisplay = null;
    mEGLContext = null;
    mEGLSurface = null;

    mSurface = null;
  }

  /**
   * Makes our EGL context and surface current.
   */
  void makeCurrent() throws TranscodingException {
    if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
      throw new TranscodingException("eglMakeCurrent failed");
    }
  }

  /**
   * Calls eglSwapBuffers. Use this to "publish" the current frame.
   */
  boolean swapBuffers() {
    return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
  }

  /**
   * Returns the Surface that the MediaCodec receives buffers from.
   */
  public Surface getSurface() {
    return mSurface;
  }

  /**
   * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
   */
  void setPresentationTime(long nsecs) {
    EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
  }

  /**
   * Checks for EGL errors.
   */
  private static void checkEglError(String msg) throws TranscodingException {
    boolean failed = false;
    int error;
    while ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
      Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
      failed = true;
    }
    if (failed) {
      throw new TranscodingException("EGL error encountered (see log)");
    }
  }
}
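
For orientation, here is a minimal sketch of the encode-side flow InputSurface supports, pairing it with a MediaCodec video encoder. The wiring below (the format, the timestamp, the surrounding class) is an illustration, not code from this commit, and it assumes the sketch lives in the same package so the package-private classes are visible:

import android.media.MediaCodec;
import android.media.MediaFormat;

final class InputSurfaceSketch {
  // videoFormat and ptsNanos are caller-supplied; "video/avc" matches the
  // AVC track code added later in this commit.
  static void renderOneFrame(MediaFormat videoFormat, long ptsNanos) throws Exception {
    MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
    encoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    InputSurface input = new InputSurface(encoder.createInputSurface());
    encoder.start();
    input.makeCurrent();                 // bind the EGL context + window surface
    // ... issue GL draw calls for the frame here ...
    input.setPresentationTime(ptsNanos); // EGL expects nanoseconds
    input.swapBuffers();                 // "publishes" the frame to the encoder
    // ... drain encoder output per frame, then:
    input.release();
    encoder.stop();
    encoder.release();
  }
}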

Muxer.java (new file)
@@ -0,0 +1,22 @@
package org.thoughtcrime.securesms.video.videoconverter;

import android.media.MediaCodec;
import android.media.MediaFormat;

import androidx.annotation.NonNull;

import java.io.IOException;
import java.nio.ByteBuffer;

public interface Muxer {

  void start() throws IOException;

  void stop() throws IOException;

  int addTrack(@NonNull MediaFormat format) throws IOException;

  void writeSampleData(int trackIndex, @NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException;

  void release();
}
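
The interface mirrors MediaMuxer's lifecycle: tracks are added before start(), one writeSampleData() per encoded buffer, then stop() and release(). A simplified sketch of a drain loop that might drive it; the encoder handling shown is an assumption for illustration and not part of this commit:

import android.media.MediaCodec;
import java.io.IOException;
import java.nio.ByteBuffer;

final class MuxerSketch {
  static void drain(MediaCodec encoder, Muxer muxer) throws IOException {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int track = -1;
    boolean started = false;
    while (true) {
      int index = encoder.dequeueOutputBuffer(info, 10_000);
      if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        track = muxer.addTrack(encoder.getOutputFormat()); // add tracks before start()
        muxer.start();
        started = true;
      } else if (index >= 0) {
        // Skip codec-config buffers; their data is already in the MediaFormat.
        if (started && info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
          ByteBuffer data = encoder.getOutputBuffer(index);
          muxer.writeSampleData(track, data, info);
        }
        encoder.releaseOutputBuffer(index, false);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
          break;
        }
      }
    }
    muxer.stop();
    muxer.release();
  }
}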

OutputSurface.java (new file)
@@ -0,0 +1,303 @@
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * This file has been modified by Signal.
 */

package org.thoughtcrime.securesms.video.videoconverter;

import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.view.Surface;

import org.signal.core.util.logging.Log;

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;

/**
 * Holds state associated with a Surface used for MediaCodec decoder output.
 * <p>
 * The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
 * and then create a Surface for that SurfaceTexture. The Surface can be passed to
 * MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the
 * texture with updateTexImage, then render the texture with GL to a pbuffer.
 * <p>
 * The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
 * Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
 * we just draw it on whatever surface is current.
 * <p>
 * By default, the Surface will be using a BufferQueue in asynchronous mode, so we
 * can potentially drop frames.
 */
final class OutputSurface implements SurfaceTexture.OnFrameAvailableListener {
  private static final String TAG = "OutputSurface";
  private static final boolean VERBOSE = false;

  private static final int EGL_OPENGL_ES2_BIT = 4;

  private EGL10 mEGL;
  private EGLDisplay mEGLDisplay;
  private EGLContext mEGLContext;
  private EGLSurface mEGLSurface;

  private SurfaceTexture mSurfaceTexture;
  private Surface mSurface;

  private final Object mFrameSyncObject = new Object(); // guards mFrameAvailable
  private boolean mFrameAvailable;

  private TextureRender mTextureRender;

  /**
   * Creates an OutputSurface backed by a pbuffer with the specified dimensions. The new
   * EGL context and surface will be made current. Creates a Surface that can be passed
   * to MediaCodec.configure().
   */
  OutputSurface(int width, int height, boolean flipX) throws TranscodingException {
    if (width <= 0 || height <= 0) {
      throw new IllegalArgumentException();
    }

    eglSetup(width, height);
    makeCurrent();

    setup(flipX);
  }

  /**
   * Creates an OutputSurface using the current EGL context. Creates a Surface that can be
   * passed to MediaCodec.configure().
   */
  OutputSurface() throws TranscodingException {
    setup(false);
  }

  /**
   * Creates instances of TextureRender and SurfaceTexture, and a Surface associated
   * with the SurfaceTexture.
   */
  private void setup(boolean flipX) throws TranscodingException {
    mTextureRender = new TextureRender(flipX);
    mTextureRender.surfaceCreated();

    // Even if we don't access the SurfaceTexture after the constructor returns, we
    // still need to keep a reference to it. The Surface doesn't retain a reference
    // at the Java level, so if we don't either then the object can get GCed, which
    // causes the native finalizer to run.
    if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
    mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());

    // This doesn't work if OutputSurface is created on the thread that CTS started for
    // these test cases.
    //
    // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
    // create a Handler that uses it. The "frame available" message is delivered
    // there, but since we're not a Looper-based thread we'll never see it. For
    // this to do anything useful, OutputSurface must be created on a thread without
    // a Looper, so that SurfaceTexture uses the main application Looper instead.
    //
    // Java language note: passing "this" out of a constructor is generally unwise,
    // but we should be able to get away with it here.
    mSurfaceTexture.setOnFrameAvailableListener(this);

    mSurface = new Surface(mSurfaceTexture);
  }

  /**
   * Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer.
   */
  private void eglSetup(int width, int height) throws TranscodingException {
    mEGL = (EGL10) EGLContext.getEGL();
    mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
    if (!mEGL.eglInitialize(mEGLDisplay, null)) {
      throw new TranscodingException("unable to initialize EGL10");
    }

    // Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
    // to be able to tell if the frame is reasonable.
    int[] attribList = {
        EGL10.EGL_RED_SIZE, 8,
        EGL10.EGL_GREEN_SIZE, 8,
        EGL10.EGL_BLUE_SIZE, 8,
        EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
        EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
        EGL10.EGL_NONE
    };
    EGLConfig[] configs = new EGLConfig[1];
    int[] numConfigs = new int[1];
    if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, 1, numConfigs)) {
      throw new TranscodingException("unable to find RGB888+pbuffer EGL config");
    }

    // Configure context for OpenGL ES 2.0.
    int[] attrib_list = {
        EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
        EGL10.EGL_NONE
    };
    mEGLContext = mEGL.eglCreateContext(mEGLDisplay, configs[0], EGL10.EGL_NO_CONTEXT,
        attrib_list);
    checkEglError("eglCreateContext");
    if (mEGLContext == null) {
      throw new TranscodingException("null context");
    }

    // Create a pbuffer surface. By using this for output, we can use glReadPixels
    // to test values in the output.
    int[] surfaceAttribs = {
        EGL10.EGL_WIDTH, width,
        EGL10.EGL_HEIGHT, height,
        EGL10.EGL_NONE
    };
    mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs);
    checkEglError("eglCreatePbufferSurface");
    if (mEGLSurface == null) {
      throw new TranscodingException("surface was null");
    }
  }

  /**
   * Discard all resources held by this class, notably the EGL context.
   */
  public void release() {
    if (mEGL != null) {
      if (mEGL.eglGetCurrentContext().equals(mEGLContext)) {
        // Clear the current context and surface to ensure they are discarded immediately.
        mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE,
            EGL10.EGL_NO_CONTEXT);
      }
      mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface);
      mEGL.eglDestroyContext(mEGLDisplay, mEGLContext);
      //mEGL.eglTerminate(mEGLDisplay);
    }

    mSurface.release();

    // this causes a bunch of warnings that appear harmless but might confuse someone:
    //  W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
    //mSurfaceTexture.release();

    // null everything out so future attempts to use this object will cause an NPE
    mEGLDisplay = null;
    mEGLContext = null;
    mEGLSurface = null;
    mEGL = null;

    mTextureRender = null;
    mSurface = null;
    mSurfaceTexture = null;
  }

  /**
   * Makes our EGL context and surface current.
   */
  private void makeCurrent() throws TranscodingException {
    if (mEGL == null) {
      throw new TranscodingException("not configured for makeCurrent");
    }
    checkEglError("before makeCurrent");
    if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
      throw new TranscodingException("eglMakeCurrent failed");
    }
  }

  /**
   * Returns the Surface that we draw onto.
   */
  public Surface getSurface() {
    return mSurface;
  }

  /**
   * Replaces the fragment shader.
   */
  void changeFragmentShader(String fragmentShader) throws TranscodingException {
    mTextureRender.changeFragmentShader(fragmentShader);
  }

  /**
   * Latches the next buffer into the texture. Must be called from the thread that created
   * the OutputSurface object, after the onFrameAvailable callback has signaled that new
   * data is available.
   */
  void awaitNewImage() throws TranscodingException {
    final int TIMEOUT_MS = 750;

    synchronized (mFrameSyncObject) {
      final long expireTime = System.currentTimeMillis() + TIMEOUT_MS;

      while (!mFrameAvailable) {
        try {
          // Wait for onFrameAvailable() to signal us. Use a timeout to avoid
          // stalling the test if it doesn't arrive.
          mFrameSyncObject.wait(TIMEOUT_MS);

          if (!mFrameAvailable && System.currentTimeMillis() > expireTime) {
            throw new TranscodingException("Surface frame wait timed out");
          }
        } catch (InterruptedException ie) {
          // shouldn't happen
          throw new TranscodingException(ie);
        }
      }
      mFrameAvailable = false;
    }

    // Latch the data.
    TextureRender.checkGlError("before updateTexImage");
    mSurfaceTexture.updateTexImage();
  }

  /**
   * Draws the data from SurfaceTexture onto the current EGL surface.
   */
  void drawImage() throws TranscodingException {
    mTextureRender.drawFrame(mSurfaceTexture);
  }

  @Override
  public void onFrameAvailable(SurfaceTexture st) {
    if (VERBOSE) Log.d(TAG, "new frame available");
    synchronized (mFrameSyncObject) {
      if (mFrameAvailable) {
        try {
          throw new TranscodingException("mFrameAvailable already set, frame could be dropped");
        } catch (TranscodingException e) {
          e.printStackTrace();
        }
      }
      mFrameAvailable = true;
      mFrameSyncObject.notifyAll();
    }
  }

  /**
   * Checks for EGL errors.
   */
  private void checkEglError(String msg) throws TranscodingException {
    boolean failed = false;
    int error;
    while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) {
      Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
      failed = true;
    }
    if (failed) {
      throw new TranscodingException("EGL error encountered (see log)");
    }
  }
}
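
A sketch of the decode-side counterpart: the decoder is assumed to have been configured with output.getSurface(), and each rendered buffer is latched and redrawn onto whatever EGL surface is current (for transcoding, typically the encoder's InputSurface). Illustrative only, not part of this commit:

import android.media.MediaCodec;

final class OutputSurfaceSketch {
  static void drainOneFrame(MediaCodec decoder, OutputSurface output) throws Exception {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int index = decoder.dequeueOutputBuffer(info, 10_000);
    if (index >= 0) {
      boolean render = info.size != 0;
      decoder.releaseOutputBuffer(index, render); // pushes the frame to the SurfaceTexture
      if (render) {
        output.awaitNewImage(); // blocks until onFrameAvailable() fires
        output.drawImage();     // GL draw onto the current EGL surface
      }
    }
  }
}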

Preconditions.java (new file)
@@ -0,0 +1,10 @@
package org.thoughtcrime.securesms.video.videoconverter;

public final class Preconditions {

  public static void checkState(final String errorMessage, final boolean expression) {
    if (!expression) {
      throw new IllegalStateException(errorMessage);
    }
  }
}

TextureRender.java (new file)
@@ -0,0 +1,258 @@
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * This file has been modified by Signal.
 */

package org.thoughtcrime.securesms.video.videoconverter;

import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;

import org.signal.core.util.logging.Log;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

/**
 * Code for rendering a texture onto a surface using OpenGL ES 2.0.
 */
final class TextureRender {
  private static final String TAG = "TextureRender";

  private static final int FLOAT_SIZE_BYTES = 4;
  private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
  private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
  private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
  private final float[] mTriangleVerticesData = {
      // X, Y, Z, U, V
      -1.0f, -1.0f, 0, 0.f, 0.f,
       1.0f, -1.0f, 0, 1.f, 0.f,
      -1.0f,  1.0f, 0, 0.f, 1.f,
       1.0f,  1.0f, 0, 1.f, 1.f,
  };

  private final float[] mTriangleVerticesDataFlippedX = {
      // X, Y, Z, U, V
      -1.0f, -1.0f, 0, 1.f, 0.f,
       1.0f, -1.0f, 0, 0.f, 0.f,
      -1.0f,  1.0f, 0, 1.f, 1.f,
       1.0f,  1.0f, 0, 0.f, 1.f,
  };

  private final FloatBuffer mTriangleVertices;

  private static final String VERTEX_SHADER =
      "uniform mat4 uMVPMatrix;\n" +
      "uniform mat4 uSTMatrix;\n" +
      "attribute vec4 aPosition;\n" +
      "attribute vec4 aTextureCoord;\n" +
      "varying vec2 vTextureCoord;\n" +
      "void main() {\n" +
      "  gl_Position = uMVPMatrix * aPosition;\n" +
      "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
      "}\n";

  private static final String FRAGMENT_SHADER =
      "#extension GL_OES_EGL_image_external : require\n" +
      "precision mediump float;\n" + // highp here doesn't seem to matter
      "varying vec2 vTextureCoord;\n" +
      "uniform samplerExternalOES sTexture;\n" +
      "void main() {\n" +
      "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
      "}\n";

  private final float[] mMVPMatrix = new float[16];
  private final float[] mSTMatrix = new float[16];

  private int mProgram;
  private int mTextureID = -12345;
  private int muMVPMatrixHandle;
  private int muSTMatrixHandle;
  private int maPositionHandle;
  private int maTextureHandle;

  TextureRender(boolean flipX) {
    float[] verticesData = flipX ? mTriangleVerticesDataFlippedX : mTriangleVerticesData;
    mTriangleVertices = ByteBuffer.allocateDirect(
        verticesData.length * FLOAT_SIZE_BYTES)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mTriangleVertices.put(verticesData).position(0);

    Matrix.setIdentityM(mSTMatrix, 0);
  }

  int getTextureId() {
    return mTextureID;
  }

  void drawFrame(SurfaceTexture st) throws TranscodingException {
    checkGlError("onDrawFrame start");
    st.getTransformMatrix(mSTMatrix);

    GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

    GLES20.glUseProgram(mProgram);
    checkGlError("glUseProgram");

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);

    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");

    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");

    Matrix.setIdentityM(mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkGlError("glDrawArrays");
    GLES20.glFinish();
  }

  /**
   * Initializes GL state. Call this after the EGL surface has been created and made current.
   */
  void surfaceCreated() throws TranscodingException {
    mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
    if (mProgram == 0) {
      throw new TranscodingException("failed creating program");
    }
    maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
    checkGlError("glGetAttribLocation aPosition");
    if (maPositionHandle == -1) {
      throw new TranscodingException("Could not get attrib location for aPosition");
    }
    maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
    checkGlError("glGetAttribLocation aTextureCoord");
    if (maTextureHandle == -1) {
      throw new TranscodingException("Could not get attrib location for aTextureCoord");
    }

    muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
    checkGlError("glGetUniformLocation uMVPMatrix");
    if (muMVPMatrixHandle == -1) {
      throw new TranscodingException("Could not get attrib location for uMVPMatrix");
    }

    muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
    checkGlError("glGetUniformLocation uSTMatrix");
    if (muSTMatrixHandle == -1) {
      throw new TranscodingException("Could not get attrib location for uSTMatrix");
    }

    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);

    mTextureID = textures[0];
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
    checkGlError("glBindTexture mTextureID");

    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
        GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
        GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
        GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
        GLES20.GL_CLAMP_TO_EDGE);
    checkGlError("glTexParameter");
  }

  /**
   * Replaces the fragment shader.
   */
  public void changeFragmentShader(String fragmentShader) throws TranscodingException {
    GLES20.glDeleteProgram(mProgram);
    mProgram = createProgram(VERTEX_SHADER, fragmentShader);
    if (mProgram == 0) {
      throw new TranscodingException("failed creating program");
    }
  }

  private static int loadShader(int shaderType, String source) throws TranscodingException {
    int shader = GLES20.glCreateShader(shaderType);
    checkGlError("glCreateShader type=" + shaderType);
    GLES20.glShaderSource(shader, source);
    GLES20.glCompileShader(shader);
    int[] compiled = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == 0) {
      Log.e(TAG, "Could not compile shader " + shaderType + ":");
      Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
      GLES20.glDeleteShader(shader);
      shader = 0;
    }
    return shader;
  }

  private int createProgram(String vertexSource, String fragmentSource) throws TranscodingException {
    int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
    if (vertexShader == 0) {
      return 0;
    }
    int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
    if (pixelShader == 0) {
      return 0;
    }

    int program = GLES20.glCreateProgram();
    checkGlError("glCreateProgram");
    if (program == 0) {
      Log.e(TAG, "Could not create program");
    }
    GLES20.glAttachShader(program, vertexShader);
    checkGlError("glAttachShader");
    GLES20.glAttachShader(program, pixelShader);
    checkGlError("glAttachShader");
    GLES20.glLinkProgram(program);
    int[] linkStatus = new int[1];
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
    if (linkStatus[0] != GLES20.GL_TRUE) {
      Log.e(TAG, "Could not link program: ");
      Log.e(TAG, GLES20.glGetProgramInfoLog(program));
      GLES20.glDeleteProgram(program);
      program = 0;
    }
    return program;
  }

  static void checkGlError(String msg) throws TranscodingException {
    boolean failed = false;
    int error;
    while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
      Log.e(TAG, msg + ": GLES20 error: 0x" + Integer.toHexString(error));
      failed = true;
    }
    if (failed) {
      throw new TranscodingException("GLES20 error encountered (see log)");
    }
  }
}
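
Because changeFragmentShader() replaces only the fragment stage, a video filter can be swapped in at runtime as long as it keeps the names the vertex shader binds (vTextureCoord, sTexture). A hypothetical grayscale example, not from this commit:

final class ShaderSketch {
  // The luma weights are the usual BT.601 coefficients; the filter itself is
  // illustrative.
  static final String GRAYSCALE_FRAGMENT_SHADER =
      "#extension GL_OES_EGL_image_external : require\n" +
      "precision mediump float;\n" +
      "varying vec2 vTextureCoord;\n" +
      "uniform samplerExternalOES sTexture;\n" +
      "void main() {\n" +
      "  vec4 color = texture2D(sTexture, vTextureCoord);\n" +
      "  float luma = dot(color.rgb, vec3(0.299, 0.587, 0.114));\n" +
      "  gl_FragColor = vec4(luma, luma, luma, color.a);\n" +
      "}\n";
}

Usage would be a single call such as outputSurface.changeFragmentShader(ShaderSketch.GRAYSCALE_FRAGMENT_SHADER).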

TranscodingException.java (new file)
@@ -0,0 +1,12 @@
package org.thoughtcrime.securesms.video.videoconverter;

final class TranscodingException extends Exception {

  TranscodingException(String message) {
    super(message);
  }

  TranscodingException(Throwable inner) {
    super(inner);
  }
}

muxer/AacTrack.java (new file)
@@ -0,0 +1,116 @@
package org.thoughtcrime.securesms.video.videoconverter.muxer;

import android.util.SparseIntArray;

import org.mp4parser.boxes.iso14496.part1.objectdescriptors.AudioSpecificConfig;
import org.mp4parser.boxes.iso14496.part1.objectdescriptors.DecoderConfigDescriptor;
import org.mp4parser.boxes.iso14496.part1.objectdescriptors.ESDescriptor;
import org.mp4parser.boxes.iso14496.part1.objectdescriptors.SLConfigDescriptor;
import org.mp4parser.boxes.iso14496.part12.SampleDescriptionBox;
import org.mp4parser.boxes.iso14496.part14.ESDescriptorBox;
import org.mp4parser.boxes.sampleentry.AudioSampleEntry;
import org.mp4parser.streaming.extensions.DefaultSampleFlagsTrackExtension;
import org.mp4parser.streaming.input.AbstractStreamingTrack;
import org.mp4parser.streaming.input.StreamingSampleImpl;

import java.io.IOException;
import java.nio.ByteBuffer;

abstract class AacTrack extends AbstractStreamingTrack {

  private static final SparseIntArray SAMPLING_FREQUENCY_INDEX_MAP = new SparseIntArray();

  static {
    SAMPLING_FREQUENCY_INDEX_MAP.put(96000, 0);
    SAMPLING_FREQUENCY_INDEX_MAP.put(88200, 1);
    SAMPLING_FREQUENCY_INDEX_MAP.put(64000, 2);
    SAMPLING_FREQUENCY_INDEX_MAP.put(48000, 3);
    SAMPLING_FREQUENCY_INDEX_MAP.put(44100, 4);
    SAMPLING_FREQUENCY_INDEX_MAP.put(32000, 5);
    SAMPLING_FREQUENCY_INDEX_MAP.put(24000, 6);
    SAMPLING_FREQUENCY_INDEX_MAP.put(22050, 7);
    SAMPLING_FREQUENCY_INDEX_MAP.put(16000, 8);
    SAMPLING_FREQUENCY_INDEX_MAP.put(12000, 9);
    SAMPLING_FREQUENCY_INDEX_MAP.put(11025, 10);
    SAMPLING_FREQUENCY_INDEX_MAP.put(8000, 11);
  }

  private final SampleDescriptionBox stsd;

  private int sampleRate;

  AacTrack(long avgBitrate, long maxBitrate, int sampleRate, int channelCount, int aacProfile) {
    this.sampleRate = sampleRate;

    final DefaultSampleFlagsTrackExtension defaultSampleFlagsTrackExtension = new DefaultSampleFlagsTrackExtension();
    defaultSampleFlagsTrackExtension.setIsLeading(2);
    defaultSampleFlagsTrackExtension.setSampleDependsOn(2);
    defaultSampleFlagsTrackExtension.setSampleIsDependedOn(2);
    defaultSampleFlagsTrackExtension.setSampleHasRedundancy(2);
    defaultSampleFlagsTrackExtension.setSampleIsNonSyncSample(false);
    this.addTrackExtension(defaultSampleFlagsTrackExtension);

    stsd = new SampleDescriptionBox();
    final AudioSampleEntry audioSampleEntry = new AudioSampleEntry("mp4a");
    if (channelCount == 7) {
      audioSampleEntry.setChannelCount(8);
    } else {
      audioSampleEntry.setChannelCount(channelCount);
    }
    audioSampleEntry.setSampleRate(sampleRate);
    audioSampleEntry.setDataReferenceIndex(1);
    audioSampleEntry.setSampleSize(16);

    final ESDescriptorBox esds = new ESDescriptorBox();
    ESDescriptor descriptor = new ESDescriptor();
    descriptor.setEsId(0);

    final SLConfigDescriptor slConfigDescriptor = new SLConfigDescriptor();
    slConfigDescriptor.setPredefined(2);
    descriptor.setSlConfigDescriptor(slConfigDescriptor);

    final DecoderConfigDescriptor decoderConfigDescriptor = new DecoderConfigDescriptor();
    decoderConfigDescriptor.setObjectTypeIndication(0x40 /*Audio ISO/IEC 14496-3*/);
    decoderConfigDescriptor.setStreamType(5 /*audio stream*/);
    decoderConfigDescriptor.setBufferSizeDB(1536);
    decoderConfigDescriptor.setMaxBitRate(maxBitrate);
    decoderConfigDescriptor.setAvgBitRate(avgBitrate);

    final AudioSpecificConfig audioSpecificConfig = new AudioSpecificConfig();
    audioSpecificConfig.setOriginalAudioObjectType(aacProfile);
    audioSpecificConfig.setSamplingFrequencyIndex(SAMPLING_FREQUENCY_INDEX_MAP.get(sampleRate));
    audioSpecificConfig.setChannelConfiguration(channelCount);
    decoderConfigDescriptor.setAudioSpecificInfo(audioSpecificConfig);

    descriptor.setDecoderConfigDescriptor(decoderConfigDescriptor);

    esds.setEsDescriptor(descriptor);

    audioSampleEntry.addBox(esds);
    stsd.addBox(audioSampleEntry);
  }

  public long getTimescale() {
    return sampleRate;
  }

  public String getHandler() {
    return "soun";
  }

  public String getLanguage() {
    return "\u0060\u0060\u0060"; // 0 in Iso639
  }

  public synchronized SampleDescriptionBox getSampleDescriptionBox() {
    return stsd;
  }

  public void close() {
  }

  void processSample(ByteBuffer frame) throws IOException {
    sampleSink.acceptSample(new StreamingSampleImpl(frame, 1024), this);
  }
}
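
A sketch of how the constructor arguments might be derived from a MediaCodec AAC encoder's output format. The bitrate values, the AAC-LC profile constant, and the anonymous subclass (AacTrack is abstract but appears to leave no abstract methods unimplemented) are assumptions for illustration, as is same-package visibility:

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

final class AacTrackSketch {
  static AacTrack fromEncoder(MediaCodec aacEncoder) {
    MediaFormat fmt = aacEncoder.getOutputFormat();
    int sampleRate   = fmt.getInteger(MediaFormat.KEY_SAMPLE_RATE);   // must appear in SAMPLING_FREQUENCY_INDEX_MAP
    int channelCount = fmt.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int aacProfile   = MediaCodecInfo.CodecProfileLevel.AACObjectLC;  // 2 = AAC-LC
    // Bitrates are illustrative; real code would take them from the encoder config.
    return new AacTrack(96_000, 96_000, sampleRate, channelCount, aacProfile) { };
  }
}

Each buffer later handed to processSample() is expected to be one raw AAC frame (no ADTS header) covering 1024 PCM samples, which is what StreamingSampleImpl(frame, 1024) encodes.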
@@ -0,0 +1,478 @@
|
||||
package org.thoughtcrime.securesms.video.videoconverter.muxer;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
|
||||
import org.mp4parser.boxes.iso14496.part12.SampleDescriptionBox;
|
||||
import org.mp4parser.boxes.iso14496.part15.AvcConfigurationBox;
|
||||
import org.mp4parser.boxes.sampleentry.VisualSampleEntry;
|
||||
import org.mp4parser.streaming.SampleExtension;
|
||||
import org.mp4parser.streaming.StreamingSample;
|
||||
import org.mp4parser.streaming.extensions.CompositionTimeSampleExtension;
|
||||
import org.mp4parser.streaming.extensions.CompositionTimeTrackExtension;
|
||||
import org.mp4parser.streaming.extensions.DimensionTrackExtension;
|
||||
import org.mp4parser.streaming.extensions.SampleFlagsSampleExtension;
|
||||
import org.mp4parser.streaming.input.AbstractStreamingTrack;
|
||||
import org.mp4parser.streaming.input.StreamingSampleImpl;
|
||||
import org.mp4parser.streaming.input.h264.H264NalUnitHeader;
|
||||
import org.mp4parser.streaming.input.h264.H264NalUnitTypes;
|
||||
import org.mp4parser.streaming.input.h264.spspps.PictureParameterSet;
|
||||
import org.mp4parser.streaming.input.h264.spspps.SeqParameterSet;
|
||||
import org.mp4parser.streaming.input.h264.spspps.SliceHeader;
|
||||
import org.signal.core.util.logging.Log;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
|
||||
abstract class AvcTrack extends AbstractStreamingTrack {
|
||||
|
||||
private static final String TAG = "AvcTrack";
|
||||
|
||||
private int maxDecFrameBuffering = 16;
|
||||
private final List<StreamingSample> decFrameBuffer = new ArrayList<>();
|
||||
private final List<StreamingSample> decFrameBuffer2 = new ArrayList<>();
|
||||
|
||||
private final LinkedHashMap<Integer, ByteBuffer> spsIdToSpsBytes = new LinkedHashMap<>();
|
||||
private final LinkedHashMap<Integer, SeqParameterSet> spsIdToSps = new LinkedHashMap<>();
|
||||
private final LinkedHashMap<Integer, ByteBuffer> ppsIdToPpsBytes = new LinkedHashMap<>();
|
||||
private final LinkedHashMap<Integer, PictureParameterSet> ppsIdToPps = new LinkedHashMap<>();
|
||||
|
||||
private int timescale = 90000;
|
||||
private int frametick = 3000;
|
||||
|
||||
private final SampleDescriptionBox stsd;
|
||||
|
||||
private final List<ByteBuffer> bufferedNals = new ArrayList<>();
|
||||
private FirstVclNalDetector fvnd;
|
||||
private H264NalUnitHeader sliceNalUnitHeader;
|
||||
private long currentPresentationTimeUs;
|
||||
|
||||
AvcTrack(final @NonNull ByteBuffer spsBuffer, final @NonNull ByteBuffer ppsBuffer) {
|
||||
|
||||
handlePPS(ppsBuffer);
|
||||
|
||||
final SeqParameterSet sps = handleSPS(spsBuffer);
|
||||
|
||||
int width = (sps.pic_width_in_mbs_minus1 + 1) * 16;
|
||||
int mult = 2;
|
||||
if (sps.frame_mbs_only_flag) {
|
||||
mult = 1;
|
||||
}
|
||||
int height = 16 * (sps.pic_height_in_map_units_minus1 + 1) * mult;
|
||||
if (sps.frame_cropping_flag) {
|
||||
int chromaArrayType = 0;
|
||||
if (!sps.residual_color_transform_flag) {
|
||||
chromaArrayType = sps.chroma_format_idc.getId();
|
||||
}
|
||||
int cropUnitX = 1;
|
||||
int cropUnitY = mult;
|
||||
if (chromaArrayType != 0) {
|
||||
cropUnitX = sps.chroma_format_idc.getSubWidth();
|
||||
cropUnitY = sps.chroma_format_idc.getSubHeight() * mult;
|
||||
}
|
||||
|
||||
width -= cropUnitX * (sps.frame_crop_left_offset + sps.frame_crop_right_offset);
|
||||
height -= cropUnitY * (sps.frame_crop_top_offset + sps.frame_crop_bottom_offset);
|
||||
}
|
||||
|
||||
|
||||
final VisualSampleEntry visualSampleEntry = new VisualSampleEntry("avc1");
|
||||
visualSampleEntry.setDataReferenceIndex(1);
|
||||
visualSampleEntry.setDepth(24);
|
||||
visualSampleEntry.setFrameCount(1);
|
||||
visualSampleEntry.setHorizresolution(72);
|
||||
visualSampleEntry.setVertresolution(72);
|
||||
final DimensionTrackExtension dte = this.getTrackExtension(DimensionTrackExtension.class);
|
||||
if (dte == null) {
|
||||
this.addTrackExtension(new DimensionTrackExtension(width, height));
|
||||
}
|
||||
visualSampleEntry.setWidth(width);
|
||||
visualSampleEntry.setHeight(height);
|
||||
|
||||
visualSampleEntry.setCompressorname("AVC Coding");
|
||||
|
||||
final AvcConfigurationBox avcConfigurationBox = new AvcConfigurationBox();
|
||||
|
||||
avcConfigurationBox.setSequenceParameterSets(Collections.singletonList(spsBuffer));
|
||||
avcConfigurationBox.setPictureParameterSets(Collections.singletonList(ppsBuffer));
|
||||
avcConfigurationBox.setAvcLevelIndication(sps.level_idc);
|
||||
avcConfigurationBox.setAvcProfileIndication(sps.profile_idc);
|
||||
avcConfigurationBox.setBitDepthLumaMinus8(sps.bit_depth_luma_minus8);
|
||||
avcConfigurationBox.setBitDepthChromaMinus8(sps.bit_depth_chroma_minus8);
|
||||
avcConfigurationBox.setChromaFormat(sps.chroma_format_idc.getId());
|
||||
avcConfigurationBox.setConfigurationVersion(1);
|
||||
avcConfigurationBox.setLengthSizeMinusOne(3);
|
||||
|
||||
|
||||
avcConfigurationBox.setProfileCompatibility(
|
||||
(sps.constraint_set_0_flag ? 128 : 0) +
|
||||
(sps.constraint_set_1_flag ? 64 : 0) +
|
||||
(sps.constraint_set_2_flag ? 32 : 0) +
|
||||
(sps.constraint_set_3_flag ? 16 : 0) +
|
||||
(sps.constraint_set_4_flag ? 8 : 0) +
|
||||
(int) (sps.reserved_zero_2bits & 0x3)
|
||||
);
|
||||
|
||||
visualSampleEntry.addBox(avcConfigurationBox);
|
||||
stsd = new SampleDescriptionBox();
|
||||
stsd.addBox(visualSampleEntry);
|
||||
|
||||
int _timescale;
|
||||
int _frametick;
|
||||
if (sps.vuiParams != null) {
|
||||
_timescale = sps.vuiParams.time_scale >> 1; // Not sure why, but I found this in several places, and it works...
|
||||
_frametick = sps.vuiParams.num_units_in_tick;
|
||||
if (_timescale == 0 || _frametick == 0) {
|
||||
Log.w(TAG, "vuiParams contain invalid values: time_scale: " + _timescale + " and frame_tick: " + _frametick + ". Setting frame rate to 30fps");
|
||||
_timescale = 0;
|
||||
_frametick = 0;
|
||||
}
|
||||
if (_frametick > 0) {
|
||||
if (_timescale / _frametick > 100) {
|
||||
Log.w(TAG, "Framerate is " + (_timescale / _frametick) + ". That is suspicious.");
|
||||
}
|
||||
} else {
|
||||
Log.w(TAG, "Frametick is " + _frametick + ". That is suspicious.");
|
||||
}
|
||||
if (sps.vuiParams.bitstreamRestriction != null) {
|
||||
maxDecFrameBuffering = sps.vuiParams.bitstreamRestriction.max_dec_frame_buffering;
|
||||
}
|
||||
} else {
|
||||
Log.w(TAG, "Can't determine frame rate as SPS does not contain vuiParama");
|
||||
_timescale = 0;
|
||||
_frametick = 0;
|
||||
}
|
||||
if (_timescale != 0 && _frametick != 0) {
|
||||
timescale = _timescale;
|
||||
frametick = _frametick;
|
||||
}
|
||||
if (sps.pic_order_cnt_type == 0) {
|
||||
addTrackExtension(new CompositionTimeTrackExtension());
|
||||
} else if (sps.pic_order_cnt_type == 1) {
|
||||
throw new MuxingException("Have not yet imlemented pic_order_cnt_type 1");
|
||||
}
|
||||
}
|
||||
|
||||
public long getTimescale() {
|
||||
return timescale;
|
||||
}
|
||||
|
||||
public String getHandler() {
|
||||
return "vide";
|
||||
}
|
||||
|
||||
public String getLanguage() {
|
||||
return "\u0060\u0060\u0060"; // 0 in Iso639
|
||||
}
|
||||
|
||||
public SampleDescriptionBox getSampleDescriptionBox() {
|
||||
return stsd;
|
||||
}
|
||||
|
||||
public void close() {
|
||||
}
|
||||
|
||||
private static H264NalUnitHeader getNalUnitHeader(@NonNull final ByteBuffer nal) {
|
||||
final H264NalUnitHeader nalUnitHeader = new H264NalUnitHeader();
|
||||
final int type = nal.get(0);
|
||||
nalUnitHeader.nal_ref_idc = (type >> 5) & 3;
|
||||
nalUnitHeader.nal_unit_type = type & 0x1f;
|
||||
return nalUnitHeader;
|
||||
}
|
||||
|
||||
void consumeNal(@NonNull final ByteBuffer nal, final long presentationTimeUs) throws IOException {
|
||||
|
||||
final H264NalUnitHeader nalUnitHeader = getNalUnitHeader(nal);
|
||||
switch (nalUnitHeader.nal_unit_type) {
|
||||
case H264NalUnitTypes.CODED_SLICE_NON_IDR:
|
||||
case H264NalUnitTypes.CODED_SLICE_DATA_PART_A:
|
||||
case H264NalUnitTypes.CODED_SLICE_DATA_PART_B:
|
||||
case H264NalUnitTypes.CODED_SLICE_DATA_PART_C:
|
||||
case H264NalUnitTypes.CODED_SLICE_IDR:
|
||||
final FirstVclNalDetector current = new FirstVclNalDetector(nal, nalUnitHeader.nal_ref_idc, nalUnitHeader.nal_unit_type);
|
||||
if (fvnd != null && fvnd.isFirstInNew(current)) {
|
||||
pushSample(createSample(bufferedNals, fvnd.sliceHeader, sliceNalUnitHeader, presentationTimeUs - currentPresentationTimeUs), false, false);
|
||||
bufferedNals.clear();
|
||||
}
|
||||
currentPresentationTimeUs = Math.max(currentPresentationTimeUs, presentationTimeUs);
|
||||
sliceNalUnitHeader = nalUnitHeader;
|
||||
fvnd = current;
|
||||
bufferedNals.add(nal);
|
||||
break;
|
||||
|
||||
case H264NalUnitTypes.SEI:
|
||||
case H264NalUnitTypes.AU_UNIT_DELIMITER:
|
||||
if (fvnd != null) {
|
||||
pushSample(createSample(bufferedNals, fvnd.sliceHeader, sliceNalUnitHeader, presentationTimeUs - currentPresentationTimeUs), false, false);
|
||||
bufferedNals.clear();
|
||||
fvnd = null;
|
||||
}
|
||||
bufferedNals.add(nal);
|
||||
break;
|
||||
|
||||
case H264NalUnitTypes.SEQ_PARAMETER_SET:
|
||||
if (fvnd != null) {
|
||||
pushSample(createSample(bufferedNals, fvnd.sliceHeader, sliceNalUnitHeader, presentationTimeUs - currentPresentationTimeUs), false, false);
|
||||
bufferedNals.clear();
|
||||
fvnd = null;
|
||||
}
|
||||
handleSPS(nal);
|
||||
break;
|
||||
|
||||
case H264NalUnitTypes.PIC_PARAMETER_SET:
|
||||
if (fvnd != null) {
|
||||
pushSample(createSample(bufferedNals, fvnd.sliceHeader, sliceNalUnitHeader, presentationTimeUs - currentPresentationTimeUs), false, false);
|
||||
bufferedNals.clear();
|
||||
fvnd = null;
|
||||
}
|
||||
handlePPS(nal);
|
||||
break;
|
||||
|
||||
case H264NalUnitTypes.END_OF_SEQUENCE:
|
||||
case H264NalUnitTypes.END_OF_STREAM:
|
||||
return;
|
||||
|
||||
case H264NalUnitTypes.SEQ_PARAMETER_SET_EXT:
|
||||
throw new IOException("Sequence parameter set extension is not yet handled. Needs TLC.");
|
||||
|
||||
default:
|
||||
Log.w(TAG, "Unknown NAL unit type: " + nalUnitHeader.nal_unit_type);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
void consumeLastNal() throws IOException {
|
||||
pushSample(createSample(bufferedNals, fvnd.sliceHeader, sliceNalUnitHeader, 0), true, true);
|
||||
}
|
||||
|
||||
private void pushSample(final StreamingSample ss, final boolean all, final boolean force) throws IOException {
|
||||
if (ss != null) {
|
||||
decFrameBuffer.add(ss);
|
||||
}
|
||||
if (all) {
|
||||
while (decFrameBuffer.size() > 0) {
|
||||
pushSample(null, false, true);
|
||||
}
|
||||
} else {
|
||||
if ((decFrameBuffer.size() - 1 > maxDecFrameBuffering) || force) {
|
||||
final StreamingSample first = decFrameBuffer.remove(0);
|
||||
final PictureOrderCountType0SampleExtension poct0se = first.getSampleExtension(PictureOrderCountType0SampleExtension.class);
|
||||
if (poct0se == null) {
|
||||
sampleSink.acceptSample(first, this);
|
||||
} else {
|
||||
int delay = 0;
|
||||
for (StreamingSample streamingSample : decFrameBuffer) {
|
||||
if (poct0se.getPoc() > streamingSample.getSampleExtension(PictureOrderCountType0SampleExtension.class).getPoc()) {
|
||||
delay++;
|
||||
}
|
||||
}
|
||||
for (StreamingSample streamingSample : decFrameBuffer2) {
|
||||
if (poct0se.getPoc() < streamingSample.getSampleExtension(PictureOrderCountType0SampleExtension.class).getPoc()) {
|
||||
delay--;
|
||||
}
|
||||
}
|
||||
decFrameBuffer2.add(first);
|
||||
if (decFrameBuffer2.size() > maxDecFrameBuffering) {
|
||||
decFrameBuffer2.remove(0).removeSampleExtension(PictureOrderCountType0SampleExtension.class);
|
||||
}
|
||||
|
||||
first.addSampleExtension(CompositionTimeSampleExtension.create(delay * frametick));
|
||||
sampleSink.acceptSample(first, this);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private SampleFlagsSampleExtension createSampleFlagsSampleExtension(H264NalUnitHeader nu, SliceHeader sliceHeader) {
|
||||
final SampleFlagsSampleExtension sampleFlagsSampleExtension = new SampleFlagsSampleExtension();
|
||||
if (nu.nal_ref_idc == 0) {
|
||||
sampleFlagsSampleExtension.setSampleIsDependedOn(2);
|
||||
} else {
|
||||
sampleFlagsSampleExtension.setSampleIsDependedOn(1);
|
||||
}
|
||||
if ((sliceHeader.slice_type == SliceHeader.SliceType.I) || (sliceHeader.slice_type == SliceHeader.SliceType.SI)) {
|
||||
sampleFlagsSampleExtension.setSampleDependsOn(2);
|
||||
} else {
|
||||
sampleFlagsSampleExtension.setSampleDependsOn(1);
|
||||
}
|
||||
sampleFlagsSampleExtension.setSampleIsNonSyncSample(H264NalUnitTypes.CODED_SLICE_IDR != nu.nal_unit_type);
|
||||
return sampleFlagsSampleExtension;
|
||||
}
|
||||
|
||||
private PictureOrderCountType0SampleExtension createPictureOrderCountType0SampleExtension(SliceHeader sliceHeader) {
|
||||
if (sliceHeader.sps.pic_order_cnt_type == 0) {
|
||||
return new PictureOrderCountType0SampleExtension(
|
||||
sliceHeader, decFrameBuffer.size() > 0 ?
|
||||
decFrameBuffer.get(decFrameBuffer.size() - 1).getSampleExtension(PictureOrderCountType0SampleExtension.class) :
|
||||
null);
|
||||
/* decFrameBuffer.add(ssi);
|
||||
if (decFrameBuffer.size() - 1 > maxDecFrameBuffering) { // just added one
|
||||
drainDecPictureBuffer(false);
|
||||
}*/
|
||||
} else if (sliceHeader.sps.pic_order_cnt_type == 1) {
|
||||
throw new MuxingException("pic_order_cnt_type == 1 needs to be implemented");
|
||||
} else if (sliceHeader.sps.pic_order_cnt_type == 2) {
|
||||
return null; // no ctts
|
||||
}
|
||||
throw new MuxingException("I don't know sliceHeader.sps.pic_order_cnt_type of " + sliceHeader.sps.pic_order_cnt_type);
|
||||
}
|
||||
|
||||
|
||||
private StreamingSample createSample(List<ByteBuffer> nals, SliceHeader sliceHeader, H264NalUnitHeader nu, long sampleDurationNs) {
|
||||
final long sampleDuration = getTimescale() * Math.max(0, sampleDurationNs) / 1000000L;
|
||||
final StreamingSample ss = new StreamingSampleImpl(nals, sampleDuration);
|
||||
ss.addSampleExtension(createSampleFlagsSampleExtension(nu, sliceHeader));
|
||||
final SampleExtension pictureOrderCountType0SampleExtension = createPictureOrderCountType0SampleExtension(sliceHeader);
|
||||
if (pictureOrderCountType0SampleExtension != null) {
|
||||
ss.addSampleExtension(pictureOrderCountType0SampleExtension);
|
||||
}
|
||||
return ss;
|
||||
}
|
||||
|
||||
private void handlePPS(final @NonNull ByteBuffer nal) {
|
||||
nal.position(1);
|
||||
try {
|
||||
final PictureParameterSet _pictureParameterSet = PictureParameterSet.read(nal);
|
||||
final ByteBuffer oldPpsSameId = ppsIdToPpsBytes.get(_pictureParameterSet.pic_parameter_set_id);
|
||||
if (oldPpsSameId != null && !oldPpsSameId.equals(nal)) {
|
||||
throw new MuxingException("OMG - I got two SPS with same ID but different settings! (AVC3 is the solution)");
|
||||
} else {
|
||||
ppsIdToPpsBytes.put(_pictureParameterSet.pic_parameter_set_id, nal);
|
||||
ppsIdToPps.put(_pictureParameterSet.pic_parameter_set_id, _pictureParameterSet);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new MuxingException("That's surprising to get IOException when working on ByteArrayInputStream", e);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
private @NonNull SeqParameterSet handleSPS(final @NonNull ByteBuffer nal) {
|
||||
nal.position(1);
|
||||
try {
|
||||
final SeqParameterSet seqParameterSet = SeqParameterSet.read(nal);
|
||||
final ByteBuffer oldSpsSameId = spsIdToSpsBytes.get(seqParameterSet.seq_parameter_set_id);
|
||||
if (oldSpsSameId != null && !oldSpsSameId.equals(nal)) {
|
||||
throw new MuxingException("OMG - I got two SPS with same ID but different settings!");
|
||||
} else {
|
||||
spsIdToSpsBytes.put(seqParameterSet.seq_parameter_set_id, nal);
|
||||
spsIdToSps.put(seqParameterSet.seq_parameter_set_id, seqParameterSet);
|
||||
}
|
||||
return seqParameterSet;
|
||||
} catch (IOException e) {
|
||||
throw new MuxingException("That's surprising to get IOException when working on ByteArrayInputStream", e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
class FirstVclNalDetector {

final SliceHeader sliceHeader;
final int frame_num;
final int pic_parameter_set_id;
final boolean field_pic_flag;
final boolean bottom_field_flag;
final int nal_ref_idc;
final int pic_order_cnt_type;
final int delta_pic_order_cnt_bottom;
final int pic_order_cnt_lsb;
final int delta_pic_order_cnt_0;
final int delta_pic_order_cnt_1;
final int idr_pic_id;

FirstVclNalDetector(ByteBuffer nal, int nal_ref_idc, int nal_unit_type) {
SliceHeader sh = new SliceHeader(nal, spsIdToSps, ppsIdToPps, nal_unit_type == 5);
this.sliceHeader = sh;
this.frame_num = sh.frame_num;
this.pic_parameter_set_id = sh.pic_parameter_set_id;
this.field_pic_flag = sh.field_pic_flag;
this.bottom_field_flag = sh.bottom_field_flag;
this.nal_ref_idc = nal_ref_idc;
this.pic_order_cnt_type = spsIdToSps.get(ppsIdToPps.get(sh.pic_parameter_set_id).seq_parameter_set_id).pic_order_cnt_type;
this.delta_pic_order_cnt_bottom = sh.delta_pic_order_cnt_bottom;
this.pic_order_cnt_lsb = sh.pic_order_cnt_lsb;
this.delta_pic_order_cnt_0 = sh.delta_pic_order_cnt_0;
this.delta_pic_order_cnt_1 = sh.delta_pic_order_cnt_1;
this.idr_pic_id = sh.idr_pic_id;
}

boolean isFirstInNew(FirstVclNalDetector nu) {
if (nu.frame_num != frame_num) {
return true;
}
if (nu.pic_parameter_set_id != pic_parameter_set_id) {
return true;
}
if (nu.field_pic_flag != field_pic_flag) {
return true;
}
if (nu.field_pic_flag) {
if (nu.bottom_field_flag != bottom_field_flag) {
return true;
}
}
if (nu.nal_ref_idc != nal_ref_idc) {
return true;
}
if (nu.pic_order_cnt_type == 0 && pic_order_cnt_type == 0) {
if (nu.pic_order_cnt_lsb != pic_order_cnt_lsb) {
return true;
}
if (nu.delta_pic_order_cnt_bottom != delta_pic_order_cnt_bottom) {
return true;
}
}
if (nu.pic_order_cnt_type == 1 && pic_order_cnt_type == 1) {
if (nu.delta_pic_order_cnt_0 != delta_pic_order_cnt_0) {
return true;
}
if (nu.delta_pic_order_cnt_1 != delta_pic_order_cnt_1) {
return true;
}
}
return false;
}
}

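/**
 * Derives the picture order count (POC) for streams with
 * pic_order_cnt_type == 0 by tracking the MSB across pic_order_cnt_lsb
 * wraparound, following the derivation in H.264 section 8.2.1.1.
 */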
static class PictureOrderCountType0SampleExtension implements SampleExtension {
int picOrderCntMsb;
int picOrderCountLsb;

PictureOrderCountType0SampleExtension(final @NonNull SliceHeader currentSlice, final @Nullable PictureOrderCountType0SampleExtension previous) {
int prevPicOrderCntLsb = 0;
int prevPicOrderCntMsb = 0;
if (previous != null) {
prevPicOrderCntLsb = previous.picOrderCountLsb;
prevPicOrderCntMsb = previous.picOrderCntMsb;
}

final int maxPicOrderCountLsb = (1 << (currentSlice.sps.log2_max_pic_order_cnt_lsb_minus4 + 4));
picOrderCountLsb = currentSlice.pic_order_cnt_lsb;
picOrderCntMsb = 0;
if ((picOrderCountLsb < prevPicOrderCntLsb) && ((prevPicOrderCntLsb - picOrderCountLsb) >= (maxPicOrderCountLsb / 2))) {
picOrderCntMsb = prevPicOrderCntMsb + maxPicOrderCountLsb;
} else if ((picOrderCountLsb > prevPicOrderCntLsb) && ((picOrderCountLsb - prevPicOrderCntLsb) > (maxPicOrderCountLsb / 2))) {
picOrderCntMsb = prevPicOrderCntMsb - maxPicOrderCountLsb;
} else {
picOrderCntMsb = prevPicOrderCntMsb;
}
}

int getPoc() {
return picOrderCntMsb + picOrderCountLsb;
}

@NonNull
@Override
public String toString() {
return "picOrderCntMsb=" + picOrderCntMsb + ", picOrderCountLsb=" + picOrderCountLsb;
}
}
}
@@ -0,0 +1,99 @@
/*
 * Copyright 2008-2019 JCodecProject
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer. Redistributions in binary form
 * must reproduce the above copyright notice, this list of conditions and the
 * following disclaimer in the documentation and/or other materials provided with
 * the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * https://github.com/jcodec/jcodec/blob/master/src/main/java/org/jcodec/codecs/h264/H264Utils.java
 *
 * This file has been modified by Signal.
 */
package org.thoughtcrime.securesms.video.videoconverter.muxer;

import androidx.annotation.NonNull;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;

final class H264Utils {

private H264Utils() {}

static @NonNull List<ByteBuffer> getNals(ByteBuffer buffer) {
final List<ByteBuffer> nals = new ArrayList<>();
ByteBuffer nal;
while ((nal = nextNALUnit(buffer)) != null) {
nals.add(nal);
}
return nals;
}
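
// Illustrative example (not part of the original source): an Annex B buffer
// laid out as 00 00 00 01 <NAL A> 00 00 01 <NAL B> yields two buffers from
// getNals, one for <NAL A> and one for <NAL B>, with the start codes stripped.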

static ByteBuffer nextNALUnit(ByteBuffer buf) {
skipToNALUnit(buf);
return gotoNALUnit(buf);
}

static void skipToNALUnit(ByteBuffer buf) {
if (!buf.hasRemaining())
return;

int val = 0xffffffff;
while (buf.hasRemaining()) {
val <<= 8;
val |= (buf.get() & 0xff);
if ((val & 0xffffff) == 1) {
break;
}
}
}

/**
 * Finds the next H.264 bitstream NAL unit start code (0x000001) and returns the data
 * that precedes it as a ByteBuffer slice
 * <p>
 * Segment byte order is always big endian
 * <p>
 * TODO: emulation prevention
 */
static ByteBuffer gotoNALUnit(ByteBuffer buf) {

if (!buf.hasRemaining())
return null;

int from = buf.position();
ByteBuffer result = buf.slice();
result.order(ByteOrder.BIG_ENDIAN);

int val = 0xffffffff;
while (buf.hasRemaining()) {
val <<= 8;
val |= (buf.get() & 0xff);
if ((val & 0xffffff) == 1) {
buf.position(buf.position() - (val == 1 ? 4 : 3));
result.limit(buf.position() - from);
break;
}
}
return result;
}
}
@@ -0,0 +1,261 @@

package org.thoughtcrime.securesms.video.videoconverter.muxer;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import org.mp4parser.boxes.iso14496.part12.SampleDescriptionBox;
import org.mp4parser.boxes.iso14496.part15.HevcConfigurationBox;
import org.mp4parser.boxes.iso14496.part15.HevcDecoderConfigurationRecord;
import org.mp4parser.boxes.sampleentry.VisualSampleEntry;
import org.mp4parser.muxer.tracks.CleanInputStream;
import org.mp4parser.muxer.tracks.h265.H265NalUnitHeader;
import org.mp4parser.muxer.tracks.h265.H265NalUnitTypes;
import org.mp4parser.muxer.tracks.h265.SequenceParameterSetRbsp;
import org.mp4parser.streaming.StreamingSample;
import org.mp4parser.streaming.extensions.DimensionTrackExtension;
import org.mp4parser.streaming.extensions.SampleFlagsSampleExtension;
import org.mp4parser.streaming.input.AbstractStreamingTrack;
import org.mp4parser.streaming.input.StreamingSampleImpl;
import org.mp4parser.tools.ByteBufferByteChannel;
import org.mp4parser.tools.IsoTypeReader;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

abstract class HevcTrack extends AbstractStreamingTrack implements H265NalUnitTypes {

private final ArrayList<ByteBuffer> bufferedNals = new ArrayList<>();
private boolean vclNalUnitSeenInAU;
private boolean isIdr = true;
private long currentPresentationTimeUs;
private final SampleDescriptionBox stsd;

HevcTrack(final @NonNull List<ByteBuffer> csd) throws IOException {
final ArrayList<ByteBuffer> sps = new ArrayList<>();
final ArrayList<ByteBuffer> pps = new ArrayList<>();
final ArrayList<ByteBuffer> vps = new ArrayList<>();
SequenceParameterSetRbsp spsStruct = null;
for (ByteBuffer nal : csd) {
final H265NalUnitHeader unitHeader = getNalUnitHeader(nal);
nal.position(0);
// collect sps/vps/pps
switch (unitHeader.nalUnitType) {
case NAL_TYPE_PPS_NUT:
pps.add(nal.duplicate());
break;
case NAL_TYPE_VPS_NUT:
vps.add(nal.duplicate());
break;
case NAL_TYPE_SPS_NUT:
sps.add(nal.duplicate());
nal.position(2);
spsStruct = new SequenceParameterSetRbsp(new CleanInputStream(Channels.newInputStream(new ByteBufferByteChannel(nal.slice()))));
break;
case NAL_TYPE_PREFIX_SEI_NUT:
//new SEIMessage(new BitReaderBuffer(nal.slice()));
break;
}
}

stsd = new SampleDescriptionBox();
stsd.addBox(createSampleEntry(sps, pps, vps, spsStruct));
}

@Override
public long getTimescale() {
return 90000;
}

@Override
public String getHandler() {
return "vide";
}

@Override
public String getLanguage() {
return "\u0060\u0060\u0060"; // three 0x60 chars decode to 0, i.e. an unset ISO 639 language
}

@Override
public SampleDescriptionBox getSampleDescriptionBox() {
return stsd;
}

@Override
public void close() {
}

void consumeLastNal() throws IOException {
wrapUp(bufferedNals, currentPresentationTimeUs);
}

void consumeNal(final @NonNull ByteBuffer nal, final long presentationTimeUs) throws IOException {

final H265NalUnitHeader unitHeader = getNalUnitHeader(nal);
final boolean isVcl = isVcl(unitHeader);

if (vclNalUnitSeenInAU) { // we need at least 1 VCL per AU
// This branch checks whether we reached the start of a new sample/AU
if (isVcl) {
if ((nal.get(2) & -128) != 0) { // this is: first_slice_segment_in_pic_flag u(1)
wrapUp(bufferedNals, presentationTimeUs);
}
} else {
switch (unitHeader.nalUnitType) {
case NAL_TYPE_PREFIX_SEI_NUT:
case NAL_TYPE_AUD_NUT:
case NAL_TYPE_PPS_NUT:
case NAL_TYPE_VPS_NUT:
case NAL_TYPE_SPS_NUT:
case NAL_TYPE_RSV_NVCL41:
case NAL_TYPE_RSV_NVCL42:
case NAL_TYPE_RSV_NVCL43:
case NAL_TYPE_RSV_NVCL44:
case NAL_TYPE_UNSPEC48:
case NAL_TYPE_UNSPEC49:
case NAL_TYPE_UNSPEC50:
case NAL_TYPE_UNSPEC51:
case NAL_TYPE_UNSPEC52:
case NAL_TYPE_UNSPEC53:
case NAL_TYPE_UNSPEC54:
case NAL_TYPE_UNSPEC55:

case NAL_TYPE_EOB_NUT: // a bit special but also causes a sample to be formed
case NAL_TYPE_EOS_NUT:
wrapUp(bufferedNals, presentationTimeUs);
break;
}
}
}

switch (unitHeader.nalUnitType) {
case NAL_TYPE_SPS_NUT:
case NAL_TYPE_VPS_NUT:
case NAL_TYPE_PPS_NUT:
case NAL_TYPE_EOB_NUT:
case NAL_TYPE_EOS_NUT:
case NAL_TYPE_AUD_NUT:
case NAL_TYPE_FD_NUT:
// ignore these
break;
default:
bufferedNals.add(nal);
break;
}

if (isVcl) {
isIdr = unitHeader.nalUnitType == NAL_TYPE_IDR_W_RADL || unitHeader.nalUnitType == NAL_TYPE_IDR_N_LP;
vclNalUnitSeenInAU = true;
}
}
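
// A sample (access unit) is closed either when the next VCL NAL carries
// first_slice_segment_in_pic_flag, or when a NAL type that can only precede
// a new access unit (SEI, AUD, parameter sets, EOS/EOB, ...) arrives.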

private void wrapUp(final @NonNull List<ByteBuffer> nals, final long presentationTimeUs) throws IOException {

final long duration = presentationTimeUs - currentPresentationTimeUs;
currentPresentationTimeUs = presentationTimeUs;

final StreamingSample sample = new StreamingSampleImpl(
nals, getTimescale() * Math.max(0, duration) / 1000000L);

final SampleFlagsSampleExtension sampleFlagsSampleExtension = new SampleFlagsSampleExtension();
sampleFlagsSampleExtension.setSampleIsNonSyncSample(!isIdr);

sample.addSampleExtension(sampleFlagsSampleExtension);

sampleSink.acceptSample(sample, this);

vclNalUnitSeenInAU = false;
isIdr = true;
nals.clear();
}
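
// HEVC NAL unit header layout (16 bits): forbidden_zero_flag (1 bit),
// nal_unit_type (6 bits), nuh_layer_id (6 bits), nuh_temporal_id_plus1
// (3 bits); the masks in getNalUnitHeader below pick these fields apart.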

private static @NonNull H265NalUnitHeader getNalUnitHeader(final @NonNull ByteBuffer nal) {
nal.position(0);
final int nalUnitHeaderValue = IsoTypeReader.readUInt16(nal);
final H265NalUnitHeader nalUnitHeader = new H265NalUnitHeader();
nalUnitHeader.forbiddenZeroFlag = (nalUnitHeaderValue & 0x8000) >> 15;
nalUnitHeader.nalUnitType = (nalUnitHeaderValue & 0x7E00) >> 9;
nalUnitHeader.nuhLayerId = (nalUnitHeaderValue & 0x1F8) >> 3;
nalUnitHeader.nuhTemporalIdPlusOne = (nalUnitHeaderValue & 0x7);
return nalUnitHeader;
}

private @NonNull VisualSampleEntry createSampleEntry(
final @NonNull ArrayList<ByteBuffer> sps,
final @NonNull ArrayList<ByteBuffer> pps,
final @NonNull ArrayList<ByteBuffer> vps,
final @Nullable SequenceParameterSetRbsp spsStruct)
{
final VisualSampleEntry visualSampleEntry = new VisualSampleEntry("hvc1");
visualSampleEntry.setDataReferenceIndex(1);
visualSampleEntry.setDepth(24);
visualSampleEntry.setFrameCount(1);
visualSampleEntry.setHorizresolution(72);
visualSampleEntry.setVertresolution(72);
visualSampleEntry.setCompressorname("HEVC Coding");

final HevcConfigurationBox hevcConfigurationBox = new HevcConfigurationBox();
hevcConfigurationBox.getHevcDecoderConfigurationRecord().setConfigurationVersion(1);

if (spsStruct != null) {
visualSampleEntry.setWidth(spsStruct.pic_width_in_luma_samples);
visualSampleEntry.setHeight(spsStruct.pic_height_in_luma_samples);
final DimensionTrackExtension dte = this.getTrackExtension(DimensionTrackExtension.class);
if (dte == null) {
this.addTrackExtension(new DimensionTrackExtension(spsStruct.pic_width_in_luma_samples, spsStruct.pic_height_in_luma_samples));
}
final HevcDecoderConfigurationRecord hevcDecoderConfigurationRecord = hevcConfigurationBox.getHevcDecoderConfigurationRecord();
hevcDecoderConfigurationRecord.setChromaFormat(spsStruct.chroma_format_idc);
hevcDecoderConfigurationRecord.setGeneral_profile_idc(spsStruct.general_profile_idc);
hevcDecoderConfigurationRecord.setGeneral_profile_compatibility_flags(spsStruct.general_profile_compatibility_flags);
hevcDecoderConfigurationRecord.setGeneral_constraint_indicator_flags(spsStruct.general_constraint_indicator_flags);
hevcDecoderConfigurationRecord.setGeneral_level_idc(spsStruct.general_level_idc);
hevcDecoderConfigurationRecord.setGeneral_tier_flag(spsStruct.general_tier_flag);
hevcDecoderConfigurationRecord.setGeneral_profile_space(spsStruct.general_profile_space);
hevcDecoderConfigurationRecord.setBitDepthChromaMinus8(spsStruct.bit_depth_chroma_minus8);
hevcDecoderConfigurationRecord.setBitDepthLumaMinus8(spsStruct.bit_depth_luma_minus8);
hevcDecoderConfigurationRecord.setTemporalIdNested(spsStruct.sps_temporal_id_nesting_flag);
}

hevcConfigurationBox.getHevcDecoderConfigurationRecord().setLengthSizeMinusOne(3);

final HevcDecoderConfigurationRecord.Array vpsArray = new HevcDecoderConfigurationRecord.Array();
vpsArray.array_completeness = false;
vpsArray.nal_unit_type = NAL_TYPE_VPS_NUT;
vpsArray.nalUnits = new ArrayList<>();
for (ByteBuffer vp : vps) {
vpsArray.nalUnits.add(Utils.toArray(vp));
}

final HevcDecoderConfigurationRecord.Array spsArray = new HevcDecoderConfigurationRecord.Array();
spsArray.array_completeness = false;
spsArray.nal_unit_type = NAL_TYPE_SPS_NUT;
spsArray.nalUnits = new ArrayList<>();
for (ByteBuffer sp : sps) {
spsArray.nalUnits.add(Utils.toArray(sp));
}

final HevcDecoderConfigurationRecord.Array ppsArray = new HevcDecoderConfigurationRecord.Array();
ppsArray.array_completeness = false;
ppsArray.nal_unit_type = NAL_TYPE_PPS_NUT;
ppsArray.nalUnits = new ArrayList<>();
for (ByteBuffer pp : pps) {
ppsArray.nalUnits.add(Utils.toArray(pp));
}

hevcConfigurationBox.getArrays().addAll(Arrays.asList(spsArray, vpsArray, ppsArray));

visualSampleEntry.addBox(hevcConfigurationBox);
return visualSampleEntry;
}
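
// The resulting 'hvc1' sample entry carries the VPS/SPS/PPS arrays inside an
// 'hvcC' HevcConfigurationBox; lengthSizeMinusOne = 3 means every NAL in the
// samples is prefixed with a 4-byte length field instead of a start code.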

private boolean isVcl(final @NonNull H265NalUnitHeader nalUnitHeader) {
return nalUnitHeader.nalUnitType >= 0 && nalUnitHeader.nalUnitType <= 31;
}
}
@@ -0,0 +1,424 @@

/*
 * Copyright (C) https://github.com/sannies/mp4parser/blob/master/LICENSE
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * https://github.com/sannies/mp4parser/blob/4ed724754cde751c3f27fdda51f288df4f4c5db5/streaming/src/main/java/org/mp4parser/streaming/output/mp4/StandardMp4Writer.java
 *
 * This file has been modified by Signal.
 */
package org.thoughtcrime.securesms.video.videoconverter.muxer;

import androidx.annotation.NonNull;

import org.mp4parser.Box;
import org.mp4parser.boxes.iso14496.part12.ChunkOffsetBox;
import org.mp4parser.boxes.iso14496.part12.CompositionTimeToSample;
import org.mp4parser.boxes.iso14496.part12.FileTypeBox;
import org.mp4parser.boxes.iso14496.part12.MediaHeaderBox;
import org.mp4parser.boxes.iso14496.part12.MovieBox;
import org.mp4parser.boxes.iso14496.part12.MovieHeaderBox;
import org.mp4parser.boxes.iso14496.part12.SampleSizeBox;
import org.mp4parser.boxes.iso14496.part12.SampleTableBox;
import org.mp4parser.boxes.iso14496.part12.SampleToChunkBox;
import org.mp4parser.boxes.iso14496.part12.SyncSampleBox;
import org.mp4parser.boxes.iso14496.part12.TimeToSampleBox;
import org.mp4parser.boxes.iso14496.part12.TrackBox;
import org.mp4parser.boxes.iso14496.part12.TrackHeaderBox;
import org.mp4parser.streaming.StreamingSample;
import org.mp4parser.streaming.StreamingTrack;
import org.mp4parser.streaming.extensions.CompositionTimeSampleExtension;
import org.mp4parser.streaming.extensions.CompositionTimeTrackExtension;
import org.mp4parser.streaming.extensions.SampleFlagsSampleExtension;
import org.mp4parser.streaming.extensions.TrackIdTrackExtension;
import org.mp4parser.streaming.output.SampleSink;
import org.mp4parser.streaming.output.mp4.DefaultBoxes;
import org.mp4parser.tools.Mp4Arrays;
import org.mp4parser.tools.Mp4Math;
import org.mp4parser.tools.Path;
import org.signal.core.util.logging.Log;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;

import static org.mp4parser.tools.CastUtils.l2i;

/**
 * Creates an MP4 file with ftyp, mdat+, moov order.
 * A very special property of this variant is that it is written sequentially. You can start transferring the
 * data while the <code>sink</code> receives it. (in contrast to typical implementations which need random
 * access to write length fields at the beginning of the file)
 */
final class Mp4Writer extends DefaultBoxes implements SampleSink {

private static final String TAG = "Mp4Writer";

private final WritableByteChannel sink;
private final List<StreamingTrack> source;
private final Date creationTime = new Date();

/**
 * Contains the start time of the next chunk in line that will be created.
 */
private final Map<StreamingTrack, Long> nextChunkCreateStartTime = new ConcurrentHashMap<>();
/**
 * Contains the start time of the next chunk in line that will be written.
 */
private final Map<StreamingTrack, Long> nextChunkWriteStartTime = new ConcurrentHashMap<>();
/**
 * Contains the next sample's start time.
 */
private final Map<StreamingTrack, Long> nextSampleStartTime = new HashMap<>();
/**
 * Buffers the samples per track until there are enough samples to form a chunk.
 */
private final Map<StreamingTrack, List<StreamingSample>> sampleBuffers = new HashMap<>();
private final Map<StreamingTrack, TrackBox> trackBoxes = new HashMap<>();
/**
 * Buffers chunks until it's time for a chunk to be written.
 */
private final Map<StreamingTrack, Queue<ChunkContainer>> chunkBuffers = new ConcurrentHashMap<>();
private final Map<StreamingTrack, Long> chunkNumbers = new HashMap<>();
private final Map<StreamingTrack, Long> sampleNumbers = new HashMap<>();
private long bytesWritten = 0;

Mp4Writer(final @NonNull List<StreamingTrack> source, final @NonNull WritableByteChannel sink) throws IOException {
this.source = new ArrayList<>(source);
this.sink = sink;

final HashSet<Long> trackIds = new HashSet<>();
for (StreamingTrack streamingTrack : source) {
streamingTrack.setSampleSink(this);
chunkNumbers.put(streamingTrack, 1L);
sampleNumbers.put(streamingTrack, 1L);
nextSampleStartTime.put(streamingTrack, 0L);
nextChunkCreateStartTime.put(streamingTrack, 0L);
nextChunkWriteStartTime.put(streamingTrack, 0L);
sampleBuffers.put(streamingTrack, new ArrayList<>());
chunkBuffers.put(streamingTrack, new LinkedList<>());
if (streamingTrack.getTrackExtension(TrackIdTrackExtension.class) != null) {
final TrackIdTrackExtension trackIdTrackExtension = streamingTrack.getTrackExtension(TrackIdTrackExtension.class);
if (trackIds.contains(trackIdTrackExtension.getTrackId())) {
throw new MuxingException("There may not be two tracks with the same trackID within one file");
}
trackIds.add(trackIdTrackExtension.getTrackId());
}
}
for (StreamingTrack streamingTrack : source) {
if (streamingTrack.getTrackExtension(TrackIdTrackExtension.class) == null) {
long maxTrackId = 0;
for (Long trackId : trackIds) {
maxTrackId = Math.max(trackId, maxTrackId);
}
final TrackIdTrackExtension tiExt = new TrackIdTrackExtension(maxTrackId + 1);
trackIds.add(tiExt.getTrackId());
streamingTrack.addTrackExtension(tiExt);
}
}

final List<String> minorBrands = new LinkedList<>();
minorBrands.add("isom");
minorBrands.add("mp42");
write(sink, new FileTypeBox("mp42", 0, minorBrands));
}

public void close() throws IOException {
for (StreamingTrack streamingTrack : source) {
writeChunkContainer(createChunkContainer(streamingTrack));
streamingTrack.close();
}
write(sink, createMoov());
}

private Box createMoov() {
final MovieBox movieBox = new MovieBox();

final MovieHeaderBox mvhd = createMvhd();
movieBox.addBox(mvhd);

// update durations
for (StreamingTrack streamingTrack : source) {
final TrackBox tb = trackBoxes.get(streamingTrack);
final MediaHeaderBox mdhd = Path.getPath(tb, "mdia[0]/mdhd[0]");
mdhd.setCreationTime(creationTime);
mdhd.setModificationTime(creationTime);
mdhd.setDuration(Objects.requireNonNull(nextSampleStartTime.get(streamingTrack)));
mdhd.setTimescale(streamingTrack.getTimescale());
mdhd.setLanguage(streamingTrack.getLanguage());
movieBox.addBox(tb);

final TrackHeaderBox tkhd = Path.getPath(tb, "tkhd[0]");
final double duration = (double) Objects.requireNonNull(nextSampleStartTime.get(streamingTrack)) / streamingTrack.getTimescale();
tkhd.setDuration((long) (mvhd.getTimescale() * duration));
}

// metadata here
return movieBox;
}

private void sortTracks() {
Collections.sort(source, (o1, o2) -> {
// compare times and account for timescales!
final long a = Objects.requireNonNull(nextChunkWriteStartTime.get(o1)) * o2.getTimescale();
final long b = Objects.requireNonNull(nextChunkWriteStartTime.get(o2)) * o1.getTimescale();
return (int) Math.signum(a - b);
});
}
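
// Cross-multiplying each track's next write time by the other track's
// timescale (a = t1 * scale2, b = t2 * scale1) orders the tracks by
// wall-clock time without dividing by the timescales.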

@Override
protected MovieHeaderBox createMvhd() {
final MovieHeaderBox mvhd = new MovieHeaderBox();
mvhd.setVersion(1);
mvhd.setCreationTime(creationTime);
mvhd.setModificationTime(creationTime);

long[] timescales = new long[0];
long maxTrackId = 0;
double duration = 0;
for (StreamingTrack streamingTrack : source) {
duration = Math.max((double) Objects.requireNonNull(nextSampleStartTime.get(streamingTrack)) / streamingTrack.getTimescale(), duration);
timescales = Mp4Arrays.copyOfAndAppend(timescales, streamingTrack.getTimescale());
maxTrackId = Math.max(streamingTrack.getTrackExtension(TrackIdTrackExtension.class).getTrackId(), maxTrackId);
}

mvhd.setTimescale(Mp4Math.lcm(timescales));
mvhd.setDuration((long) (Mp4Math.lcm(timescales) * duration));
// find the next available trackId
mvhd.setNextTrackId(maxTrackId + 1);
return mvhd;
}

private void write(final @NonNull WritableByteChannel out, Box... boxes) throws IOException {
for (Box box1 : boxes) {
box1.getBox(out);
bytesWritten += box1.getSize();
}
}

/**
 * Tests if the currently received samples for a given track already form a
 * 'chunk' as we want to have it. The next sample will not be part of that
 * chunk; it will be added to the following chunk later.
 *
 * @param streamingTrack track to test
 * @param next the latest sample
 * @return true if a chunk is to be created.
 */
private boolean isChunkReady(StreamingTrack streamingTrack, StreamingSample next) {
final long ts = Objects.requireNonNull(nextSampleStartTime.get(streamingTrack));
final long cfst = Objects.requireNonNull(nextChunkCreateStartTime.get(streamingTrack));

// chunk interleave of 2 seconds
return (ts >= cfst + 2 * streamingTrack.getTimescale());
}
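
// For example: a video track with timescale 90000 closes a chunk once the
// buffered samples span at least 180000 ticks, i.e. two seconds of media.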

private void writeChunkContainer(ChunkContainer chunkContainer) throws IOException {
final TrackBox tb = trackBoxes.get(chunkContainer.streamingTrack);
final ChunkOffsetBox stco = Objects.requireNonNull(Path.getPath(tb, "mdia[0]/minf[0]/stbl[0]/stco[0]"));
stco.setChunkOffsets(Mp4Arrays.copyOfAndAppend(stco.getChunkOffsets(), bytesWritten + 8));
write(sink, chunkContainer.mdat);
}
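
// The recorded chunk offset is bytesWritten + 8: stco must point at the first
// sample payload inside the mdat, just past the box's 8-byte size/type header.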

@Override
public void acceptSample(
final @NonNull StreamingSample streamingSample,
final @NonNull StreamingTrack streamingTrack) throws IOException
{

TrackBox tb = trackBoxes.get(streamingTrack);
if (tb == null) {
tb = new TrackBox();
tb.addBox(createTkhd(streamingTrack));
tb.addBox(createMdia(streamingTrack));
trackBoxes.put(streamingTrack, tb);
}

if (isChunkReady(streamingTrack, streamingSample)) {

final ChunkContainer chunkContainer = createChunkContainer(streamingTrack);
Objects.requireNonNull(sampleBuffers.get(streamingTrack)).clear();
nextChunkCreateStartTime.put(streamingTrack, Objects.requireNonNull(nextChunkCreateStartTime.get(streamingTrack)) + chunkContainer.duration);
final Queue<ChunkContainer> chunkQueue = Objects.requireNonNull(chunkBuffers.get(streamingTrack));
chunkQueue.add(chunkContainer);
if (source.get(0) == streamingTrack) {

Queue<ChunkContainer> tracksChunkQueue;
StreamingTrack currentStreamingTrack;
// This will write AT LEAST the currently created chunk and possibly a few more
while (!(tracksChunkQueue = chunkBuffers.get((currentStreamingTrack = this.source.get(0)))).isEmpty()) {
final ChunkContainer currentChunkContainer = tracksChunkQueue.remove();
writeChunkContainer(currentChunkContainer);
Log.d(TAG, "write chunk " + currentStreamingTrack.getHandler() + ". duration " + (double) currentChunkContainer.duration / currentStreamingTrack.getTimescale());
final long ts = Objects.requireNonNull(nextChunkWriteStartTime.get(currentStreamingTrack)) + currentChunkContainer.duration;
nextChunkWriteStartTime.put(currentStreamingTrack, ts);
Log.d(TAG, currentStreamingTrack.getHandler() + " track advanced to " + (double) ts / currentStreamingTrack.getTimescale());
sortTracks();
}
} else {
Log.d(TAG, streamingTrack.getHandler() + " track delayed, queue size is " + chunkQueue.size());
}
}

Objects.requireNonNull(sampleBuffers.get(streamingTrack)).add(streamingSample);
nextSampleStartTime.put(streamingTrack, Objects.requireNonNull(nextSampleStartTime.get(streamingTrack)) + streamingSample.getDuration());
}
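
// Chunks are written in presentation order: sortTracks keeps the track with
// the earliest pending write time at source.get(0), and only that track's
// queue is flushed; the other tracks buffer their chunks until they move to
// the front.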

private ChunkContainer createChunkContainer(final @NonNull StreamingTrack streamingTrack) {

final List<StreamingSample> samples = Objects.requireNonNull(sampleBuffers.get(streamingTrack));
final long chunkNumber = Objects.requireNonNull(chunkNumbers.get(streamingTrack));
chunkNumbers.put(streamingTrack, chunkNumber + 1);
final ChunkContainer cc = new ChunkContainer();
cc.streamingTrack = streamingTrack;
cc.mdat = new Mdat(samples);
cc.duration = Objects.requireNonNull(nextSampleStartTime.get(streamingTrack)) - Objects.requireNonNull(nextChunkCreateStartTime.get(streamingTrack));
final TrackBox tb = trackBoxes.get(streamingTrack);
final SampleTableBox stbl = Objects.requireNonNull(Path.getPath(tb, "mdia[0]/minf[0]/stbl[0]"));
final SampleToChunkBox stsc = Objects.requireNonNull(Path.getPath(stbl, "stsc[0]"));
if (stsc.getEntries().isEmpty()) {
final List<SampleToChunkBox.Entry> entries = new ArrayList<>();
stsc.setEntries(entries);
entries.add(new SampleToChunkBox.Entry(chunkNumber, samples.size(), 1));
} else {
final SampleToChunkBox.Entry e = stsc.getEntries().get(stsc.getEntries().size() - 1);
if (e.getSamplesPerChunk() != samples.size()) {
stsc.getEntries().add(new SampleToChunkBox.Entry(chunkNumber, samples.size(), 1));
}
}
long sampleNumber = Objects.requireNonNull(sampleNumbers.get(streamingTrack));

final SampleSizeBox stsz = Objects.requireNonNull(Path.getPath(stbl, "stsz[0]"));
final TimeToSampleBox stts = Objects.requireNonNull(Path.getPath(stbl, "stts[0]"));
SyncSampleBox stss = Path.getPath(stbl, "stss[0]");
CompositionTimeToSample ctts = Path.getPath(stbl, "ctts[0]");
if (streamingTrack.getTrackExtension(CompositionTimeTrackExtension.class) != null) {
if (ctts == null) {
ctts = new CompositionTimeToSample();
ctts.setEntries(new ArrayList<>());

final ArrayList<Box> bs = new ArrayList<>(stbl.getBoxes());
bs.add(bs.indexOf(stts), ctts);
}
}

final long[] sampleSizes = new long[samples.size()];
int i = 0;
for (StreamingSample sample : samples) {
sampleSizes[i++] = sample.getContent().limit();

if (ctts != null) {
ctts.getEntries().add(new CompositionTimeToSample.Entry(1, l2i(sample.getSampleExtension(CompositionTimeSampleExtension.class).getCompositionTimeOffset())));
}

if (stts.getEntries().isEmpty()) {
final ArrayList<TimeToSampleBox.Entry> entries = new ArrayList<>(stts.getEntries());
entries.add(new TimeToSampleBox.Entry(1, sample.getDuration()));
stts.setEntries(entries);
} else {
final TimeToSampleBox.Entry sttsEntry = stts.getEntries().get(stts.getEntries().size() - 1);
if (sttsEntry.getDelta() == sample.getDuration()) {
sttsEntry.setCount(sttsEntry.getCount() + 1);
} else {
stts.getEntries().add(new TimeToSampleBox.Entry(1, sample.getDuration()));
}
}
final SampleFlagsSampleExtension sampleFlagsSampleExtension = sample.getSampleExtension(SampleFlagsSampleExtension.class);
if (sampleFlagsSampleExtension != null && sampleFlagsSampleExtension.isSyncSample()) {
if (stss == null) {
stss = new SyncSampleBox();
stbl.addBox(stss);
}
stss.setSampleNumber(Mp4Arrays.copyOfAndAppend(stss.getSampleNumber(), sampleNumber));
}
sampleNumber++;
}
stsz.setSampleSizes(Mp4Arrays.copyOfAndAppend(stsz.getSampleSizes(), sampleSizes));

sampleNumbers.put(streamingTrack, sampleNumber);
samples.clear();
Log.d(TAG, "chunk container created for " + streamingTrack.getHandler() + ". mdat size: " + cc.mdat.size + ". chunk duration is " + (double) cc.duration / streamingTrack.getTimescale());
return cc;
}
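
// Per-chunk bookkeeping: stsc maps chunks to samples-per-chunk, stsz records
// each sample's byte size, stts run-length-encodes sample durations, stss
// lists sync samples, and ctts (when present) stores composition time offsets.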

protected @NonNull Box createMdhd(final @NonNull StreamingTrack streamingTrack) {
final MediaHeaderBox mdhd = new MediaHeaderBox();
mdhd.setCreationTime(creationTime);
mdhd.setModificationTime(creationTime);
//mdhd.setDuration(nextSampleStartTime.get(streamingTrack)); will update at the end, in createMoov
mdhd.setTimescale(streamingTrack.getTimescale());
mdhd.setLanguage(streamingTrack.getLanguage());
return mdhd;
}

private class Mdat implements Box {
final ArrayList<StreamingSample> samples;
long size;

Mdat(final @NonNull List<StreamingSample> samples) {
this.samples = new ArrayList<>(samples);
size = 8;
for (StreamingSample sample : samples) {
size += sample.getContent().limit();
}
}
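
// getBox below emits the canonical 8-byte MP4 box header -- a 32-bit
// big-endian size followed by the fourcc 'mdat' (bytes 109, 100, 97, 116) --
// and then streams out the buffered sample payloads.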

@Override
public String getType() {
return "mdat";
}

@Override
public long getSize() {
return size;
}

@Override
public void getBox(WritableByteChannel writableByteChannel) throws IOException {
writableByteChannel.write(ByteBuffer.wrap(new byte[]{
(byte) ((size & 0xff000000) >> 24),
(byte) ((size & 0xff0000) >> 16),
(byte) ((size & 0xff00) >> 8),
(byte) ((size & 0xff)),
109, 100, 97, 116, // mdat
}));
for (StreamingSample sample : samples) {
writableByteChannel.write((ByteBuffer) sample.getContent().rewind());
}
}
}

private class ChunkContainer {
Mdat mdat;
StreamingTrack streamingTrack;
long duration;
}
}
@@ -0,0 +1,12 @@

package org.thoughtcrime.securesms.video.videoconverter.muxer;

final class MuxingException extends RuntimeException {

public MuxingException(String message) {
super(message);
}

public MuxingException(String message, Throwable cause) {
super(message, cause);
}
}
@@ -0,0 +1,144 @@

package org.thoughtcrime.securesms.video.videoconverter.muxer;

import android.media.MediaCodec;
import android.media.MediaFormat;

import androidx.annotation.NonNull;

import org.mp4parser.streaming.StreamingTrack;
import org.thoughtcrime.securesms.video.videoconverter.Muxer;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.util.ArrayList;
import java.util.List;

public final class StreamingMuxer implements Muxer {

private final OutputStream outputStream;
private final List<MediaCodecTrack> tracks = new ArrayList<>();
private Mp4Writer mp4Writer;

public StreamingMuxer(OutputStream outputStream) {
this.outputStream = outputStream;
}

@Override
public void start() throws IOException {
final List<StreamingTrack> source = new ArrayList<>();
for (MediaCodecTrack track : tracks) {
source.add((StreamingTrack) track);
}
mp4Writer = new Mp4Writer(source, Channels.newChannel(outputStream));
}

@Override
public void stop() throws IOException {
if (mp4Writer == null) {
throw new IllegalStateException("calling stop prior to start");
}
for (MediaCodecTrack track : tracks) {
track.finish();
}
mp4Writer.close();
mp4Writer = null;
}

@Override
public int addTrack(@NonNull MediaFormat format) throws IOException {

final String mime = format.getString(MediaFormat.KEY_MIME);
switch (mime) {
case "video/avc":
tracks.add(new MediaCodecAvcTrack(format));
break;
case "audio/mp4a-latm":
tracks.add(new MediaCodecAacTrack(format));
break;
case "video/hevc":
tracks.add(new MediaCodecHevcTrack(format));
break;
default:
throw new IllegalArgumentException("unknown track format: " + mime);
}
return tracks.size() - 1;
}
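
// Typical call sequence (illustrative): addTrack(format) for each encoder
// output format, then start(), then writeSampleData(...) for every encoded
// buffer, and finally stop() to flush the tracks and write the moov.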

@Override
public void writeSampleData(int trackIndex, @NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException {
tracks.get(trackIndex).writeSampleData(byteBuf, bufferInfo);
}

@Override
public void release() {
}

interface MediaCodecTrack {
void writeSampleData(@NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException;

void finish() throws IOException;
}

static class MediaCodecAvcTrack extends AvcTrack implements MediaCodecTrack {

MediaCodecAvcTrack(@NonNull MediaFormat format) {
super(Utils.subBuffer(format.getByteBuffer("csd-0"), 4), Utils.subBuffer(format.getByteBuffer("csd-1"), 4));
}

@Override
public void writeSampleData(@NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException {
final List<ByteBuffer> nals = H264Utils.getNals(byteBuf);
for (ByteBuffer nal : nals) {
consumeNal(Utils.clone(nal), bufferInfo.presentationTimeUs);
}
}

@Override
public void finish() throws IOException {
consumeLastNal();
}
}

static class MediaCodecHevcTrack extends HevcTrack implements MediaCodecTrack {

MediaCodecHevcTrack(@NonNull MediaFormat format) throws IOException {
super(H264Utils.getNals(format.getByteBuffer("csd-0")));
}

@Override
public void writeSampleData(@NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException {
final List<ByteBuffer> nals = H264Utils.getNals(byteBuf);
for (ByteBuffer nal : nals) {
consumeNal(Utils.clone(nal), bufferInfo.presentationTimeUs);
}
}

@Override
public void finish() throws IOException {
consumeLastNal();
}
}

static class MediaCodecAacTrack extends AacTrack implements MediaCodecTrack {

MediaCodecAacTrack(@NonNull MediaFormat format) {
super(format.getInteger(MediaFormat.KEY_BIT_RATE), format.getInteger(MediaFormat.KEY_BIT_RATE),
format.getInteger(MediaFormat.KEY_SAMPLE_RATE), format.getInteger(MediaFormat.KEY_CHANNEL_COUNT),
format.getInteger(MediaFormat.KEY_AAC_PROFILE));
}
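
// Both bitrate arguments are filled from KEY_BIT_RATE here; AacTrack (not
// part of this diff) presumably takes an average and a peak bitrate.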

@Override
public void writeSampleData(@NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException {
final byte[] buffer = new byte[bufferInfo.size];
byteBuf.position(bufferInfo.offset);
byteBuf.get(buffer, 0, bufferInfo.size);
processSample(ByteBuffer.wrap(buffer));
}

@Override
public void finish() {
}
}
}
@@ -0,0 +1,44 @@

package org.thoughtcrime.securesms.video.videoconverter.muxer;

import androidx.annotation.NonNull;

import java.nio.ByteBuffer;

/**
 * Based on https://github.com/jcodec/jcodec/blob/master/src/main/java/org/jcodec/codecs/h264/H264Utils.java
 */
final class Utils {

private Utils() {}

static byte[] toArray(final @NonNull ByteBuffer buf) {
final ByteBuffer newBuf = buf.duplicate();
byte[] bytes = new byte[newBuf.remaining()];
newBuf.get(bytes, 0, bytes.length);
return bytes;
}

public static ByteBuffer clone(final @NonNull ByteBuffer original) {
final ByteBuffer clone = ByteBuffer.allocate(original.capacity());
original.rewind();
clone.put(original);
original.rewind();
clone.flip();
return clone;
}
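
// Note: clone() copies from position 0 through the original's limit into a
// fresh buffer, leaves the original rewound, and returns the copy flipped
// and ready to read.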

static @NonNull ByteBuffer subBuffer(final @NonNull ByteBuffer buf, final int start) {
return subBuffer(buf, start, buf.remaining() - start);
}

static @NonNull ByteBuffer subBuffer(final @NonNull ByteBuffer buf, final int start, final int count) {
final ByteBuffer newBuf = buf.duplicate();
byte[] bytes = new byte[count];
newBuf.position(start);
newBuf.get(bytes, 0, bytes.length);
return ByteBuffer.wrap(bytes);
}
}
24
video/witness-verifications.gradle
Normal file
@@ -0,0 +1,24 @@

// Auto-generated, use ./gradlew calculateChecksums to regenerate

dependencyVerification {
verify = [

['androidx.annotation:annotation:1.1.0',
'd38d63edb30f1467818d50aaf05f8a692dea8b31392a049bfa991b159ad5b692'],

['com.google.protobuf:protobuf-javalite:3.10.0',
'215a94dbe100130295906b531bb72a26965c7ac8fcd9a75bf8054a8ac2abf4b4'],

['org.mp4parser:isoparser:1.9.39',
'a3a7172648f1ac4b2a369ecca2861317e472179c842a5217b08643ba0a1dfa12'],

['org.mp4parser:muxer:1.9.39',
'4befe68d411cd889628b53bab211d395899a9ce893ae6766ec2f4fefec5b7835'],

['org.mp4parser:streaming:1.9.39',
'da5151cfc3bf491d550fb9127bba22736f4b7416058d58a1a5fcfdfa3673876d'],

['org.slf4j:slf4j-api:1.7.24',
'baf3c7fe15fefeaf9e5b000d94547379dc48370f22a8797e239c127e7d7756ec'],
]
}