Audio parsing and RMS computation for waveform visualization.

This commit is contained in:
Anton Chekulaev 2020-10-08 15:32:47 +11:00
parent e07cb716c0
commit 692741f406
13 changed files with 894 additions and 85 deletions

View File

@ -149,6 +149,7 @@ dependencies {
implementation "com.fasterxml.jackson.core:jackson-databind:2.9.8"
implementation "com.squareup.okhttp3:okhttp:3.12.1"
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-android:1.3.9'
implementation "nl.komponents.kovenant:kovenant:$kovenant_version"
implementation "nl.komponents.kovenant:kovenant-android:$kovenant_version"
implementation "com.github.lelloman:android-identicons:v11"

View File

@ -32,7 +32,7 @@
app:minHeight="100dp"
app:maxHeight="300dp"/>
<org.thoughtcrime.securesms.components.AudioView
<org.thoughtcrime.securesms.loki.views.MessageAudioView
android:id="@+id/attachment_audio"
android:layout_width="210dp"
android:layout_height="wrap_content"

View File

@ -1,5 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<org.thoughtcrime.securesms.components.AudioView
<org.thoughtcrime.securesms.loki.views.MessageAudioView
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"

View File

@ -1,5 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<org.thoughtcrime.securesms.components.AudioView
<org.thoughtcrime.securesms.loki.views.MessageAudioView
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:id="@+id/audio_view"

View File

@ -2,7 +2,7 @@
<merge xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
xmlns:app="http://schemas.android.com/apk/res-auto"
tools:context="org.thoughtcrime.securesms.components.AudioView">
tools:context="org.thoughtcrime.securesms.loki.views.MessageAudioView">
<LinearLayout android:id="@+id/audio_widget_container"
android:orientation="vertical"
@ -71,10 +71,10 @@
</org.thoughtcrime.securesms.components.AnimatingToggle>
<!-- TODO: Extract styling attributes into a theme. -->
<org.thoughtcrime.securesms.components.WaveformSeekBar
<org.thoughtcrime.securesms.loki.views.WaveformSeekBar
android:id="@+id/seek"
android:layout_width="fill_parent"
android:layout_height="30dp"
android:layout_height="40dp"
android:layout_gravity="center_vertical"
app:wave_background_color="#bbb"
app:wave_progress_color="?colorPrimary"

View File

@ -169,7 +169,7 @@
<attr name="useSmallIcon" format="boolean" />
</declare-styleable>
<declare-styleable name="AudioView">
<declare-styleable name="MessageAudioView">
<attr name="widgetBackground" format="color"/>
<attr name="foregroundTintColor" format="color" />
<attr name="backgroundTintColor" format="color" />

View File

@ -0,0 +1,331 @@
package org.thoughtcrime.securesms.components;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.drawable.AnimatedVectorDrawable;
import android.os.Build;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.SeekBar;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.pnikosis.materialishprogress.ProgressWheel;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import org.thoughtcrime.securesms.audio.AudioSlidePlayer;
import org.thoughtcrime.securesms.database.AttachmentDatabase;
import org.thoughtcrime.securesms.events.PartProgressEvent;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.mms.AudioSlide;
import org.thoughtcrime.securesms.mms.SlideClickListener;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import network.loki.messenger.R;
/**
 * Legacy audio-message widget: shows play/pause/download controls, a seek bar and an
 * elapsed-time label for an {@link AudioSlide}, delegating actual playback to
 * {@link AudioSlidePlayer}.
 *
 * <p>Superseded by {@code org.thoughtcrime.securesms.loki.views.MessageAudioView}
 * (see the Kotlin rewrite elsewhere in this change set).
 */
public class AudioViewOld extends FrameLayout implements AudioSlidePlayer.Listener {

  private static final String TAG = AudioViewOld.class.getSimpleName();

  private final @NonNull AnimatingToggle controlToggle;
  private final @NonNull ViewGroup container;
  private final @NonNull ImageView playButton;
  private final @NonNull ImageView pauseButton;
  private final @NonNull ImageView downloadButton;
  private final @NonNull ProgressWheel downloadProgress;
  private final @NonNull SeekBar seekBar;
  private final @NonNull TextView timestamp;

  private @Nullable SlideClickListener downloadListener;
  private @Nullable AudioSlidePlayer audioSlidePlayer;
  // Debounces spurious backwards seek-bar jumps reported by the player; see onPlayerProgress().
  private int backwardsCounter;

  public AudioViewOld(Context context) {
    this(context, null);
  }

  public AudioViewOld(Context context, AttributeSet attrs) {
    this(context, attrs, 0);
  }

  public AudioViewOld(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    inflate(context, R.layout.message_audio_view, this);

    this.container        = (ViewGroup) findViewById(R.id.audio_widget_container);
    this.controlToggle    = (AnimatingToggle) findViewById(R.id.control_toggle);
    this.playButton       = (ImageView) findViewById(R.id.play);
    this.pauseButton      = (ImageView) findViewById(R.id.pause);
    this.downloadButton   = (ImageView) findViewById(R.id.download);
    this.downloadProgress = (ProgressWheel) findViewById(R.id.download_progress);
    this.seekBar          = (SeekBar) findViewById(R.id.seek);
    this.timestamp        = (TextView) findViewById(R.id.timestamp);

    this.playButton.setOnClickListener(new PlayClickedListener());
    this.pauseButton.setOnClickListener(new PauseClickedListener());
    this.seekBar.setOnSeekBarChangeListener(new SeekBarModifiedListener());

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
      this.playButton.setImageDrawable(context.getDrawable(R.drawable.play_icon));
      this.pauseButton.setImageDrawable(context.getDrawable(R.drawable.pause_icon));
      this.playButton.setBackground(context.getDrawable(R.drawable.ic_circle_fill_white_48dp));
      this.pauseButton.setBackground(context.getDrawable(R.drawable.ic_circle_fill_white_48dp));
    }

    if (attrs != null) {
      TypedArray typedArray = context.getTheme().obtainStyledAttributes(attrs, R.styleable.MessageAudioView, 0, 0);
      setTint(typedArray.getColor(R.styleable.MessageAudioView_foregroundTintColor, Color.WHITE),
              typedArray.getColor(R.styleable.MessageAudioView_backgroundTintColor, Color.WHITE));
      container.setBackgroundColor(typedArray.getColor(R.styleable.MessageAudioView_widgetBackground, Color.TRANSPARENT));
      typedArray.recycle();
    }
  }

  @Override
  protected void onAttachedToWindow() {
    super.onAttachedToWindow();
    // Guard against double registration when the view is re-attached.
    if (!EventBus.getDefault().isRegistered(this)) EventBus.getDefault().register(this);
  }

  @Override
  protected void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    EventBus.getDefault().unregister(this);
  }

  /**
   * Binds an audio slide to the widget and shows the control matching its transfer state
   * (download button, download spinner, or play button).
   *
   * @param audio        the audio attachment to present.
   * @param showControls whether download controls may be shown for pending attachments.
   */
  public void setAudio(final @NonNull AudioSlide audio,
                       final boolean showControls)
  {
    if (showControls && audio.isPendingDownload()) {
      controlToggle.displayQuick(downloadButton);
      seekBar.setEnabled(false);
      downloadButton.setOnClickListener(new DownloadClickedListener(audio));
      if (downloadProgress.isSpinning()) downloadProgress.stopSpinning();
    } else if (showControls && audio.getTransferState() == AttachmentDatabase.TRANSFER_PROGRESS_STARTED) {
      controlToggle.displayQuick(downloadProgress);
      seekBar.setEnabled(false);
      downloadProgress.spin();
    } else {
      controlToggle.displayQuick(playButton);
      seekBar.setEnabled(true);
      if (downloadProgress.isSpinning()) downloadProgress.stopSpinning();
    }

    this.audioSlidePlayer = AudioSlidePlayer.createFor(getContext(), audio, this);
  }

  /** Stops playback if this widget is currently playing (pause button visible). */
  public void cleanup() {
    if (this.audioSlidePlayer != null && pauseButton.getVisibility() == View.VISIBLE) {
      this.audioSlidePlayer.stop();
    }
  }

  public void setDownloadClickListener(@Nullable SlideClickListener listener) {
    this.downloadListener = listener;
  }

  @Override
  public void onPlayerStart(@NonNull AudioSlidePlayer player) {
    if (this.pauseButton.getVisibility() != View.VISIBLE) {
      togglePlayToPause();
    }
  }

  @Override
  public void onPlayerStop(@NonNull AudioSlidePlayer player) {
    if (this.playButton.getVisibility() != View.VISIBLE) {
      togglePauseToPlay();
    }

    // If playback stopped at (or near) the end, reset the seek bar to the beginning.
    // backwardsCounter is primed past its threshold so the backwards move isn't debounced.
    if (seekBar.getProgress() + 5 >= seekBar.getMax()) {
      backwardsCounter = 4;
      onPlayerProgress(player, 0.0, 0);
    }
  }

  @Override
  public void setFocusable(boolean focusable) {
    super.setFocusable(focusable);
    this.playButton.setFocusable(focusable);
    this.pauseButton.setFocusable(focusable);
    this.seekBar.setFocusable(focusable);
    this.seekBar.setFocusableInTouchMode(focusable);
    this.downloadButton.setFocusable(focusable);
  }

  @Override
  public void setClickable(boolean clickable) {
    super.setClickable(clickable);
    this.playButton.setClickable(clickable);
    this.pauseButton.setClickable(clickable);
    this.seekBar.setClickable(clickable);
    // When not clickable, swallow touches so the seek bar cannot be dragged.
    this.seekBar.setOnTouchListener(clickable ? null : new TouchIgnoringListener());
    this.downloadButton.setClickable(clickable);
  }

  @Override
  public void setEnabled(boolean enabled) {
    super.setEnabled(enabled);
    this.playButton.setEnabled(enabled);
    this.pauseButton.setEnabled(enabled);
    this.seekBar.setEnabled(enabled);
    this.downloadButton.setEnabled(enabled);
  }

  @Override
  public void onPlayerProgress(@NonNull AudioSlidePlayer player, double progress, long millis) {
    int seekProgress = (int) Math.floor(progress * this.seekBar.getMax());

    // Only move forward (or reset after the debounce threshold) to avoid jitter from
    // out-of-order progress callbacks.
    if (seekProgress > seekBar.getProgress() || backwardsCounter > 3) {
      backwardsCounter = 0;
      this.seekBar.setProgress(seekProgress);
      // FIX: seconds must be reduced modulo one minute; the previous code printed the
      // TOTAL seconds ("01:75" at 75s instead of "01:15").
      long minutes = TimeUnit.MILLISECONDS.toMinutes(millis);
      long seconds = TimeUnit.MILLISECONDS.toSeconds(millis) - TimeUnit.MINUTES.toSeconds(minutes);
      this.timestamp.setText(String.format("%02d:%02d", minutes, seconds));
    } else {
      backwardsCounter++;
    }
  }

  /**
   * Applies foreground/background tint colors to all sub-widgets.
   * Image tinting requires API 21+; older versions fall back to a color filter.
   */
  public void setTint(int foregroundTint, int backgroundTint) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
      this.playButton.setBackgroundTintList(ColorStateList.valueOf(foregroundTint));
      this.playButton.setImageTintList(ColorStateList.valueOf(backgroundTint));
      this.pauseButton.setBackgroundTintList(ColorStateList.valueOf(foregroundTint));
      this.pauseButton.setImageTintList(ColorStateList.valueOf(backgroundTint));
    } else {
      this.playButton.setColorFilter(foregroundTint, PorterDuff.Mode.SRC_IN);
      this.pauseButton.setColorFilter(foregroundTint, PorterDuff.Mode.SRC_IN);
    }

    this.downloadButton.setColorFilter(foregroundTint, PorterDuff.Mode.SRC_IN);
    this.downloadProgress.setBarColor(foregroundTint);
    this.timestamp.setTextColor(foregroundTint);
    this.seekBar.getProgressDrawable().setColorFilter(foregroundTint, PorterDuff.Mode.SRC_IN);
    this.seekBar.getThumb().setColorFilter(foregroundTint, PorterDuff.Mode.SRC_IN);
  }

  /** Returns the current seek position as a fraction in [0, 1]. */
  private double getProgress() {
    if (this.seekBar.getProgress() <= 0 || this.seekBar.getMax() <= 0) {
      return 0;
    } else {
      return (double) this.seekBar.getProgress() / (double) this.seekBar.getMax();
    }
  }

  private void togglePlayToPause() {
    controlToggle.displayQuick(pauseButton);

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
      AnimatedVectorDrawable playToPauseDrawable = (AnimatedVectorDrawable) getContext().getDrawable(R.drawable.play_to_pause_animation);
      pauseButton.setImageDrawable(playToPauseDrawable);
      playToPauseDrawable.start();
    }
  }

  private void togglePauseToPlay() {
    controlToggle.displayQuick(playButton);

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
      AnimatedVectorDrawable pauseToPlayDrawable = (AnimatedVectorDrawable) getContext().getDrawable(R.drawable.pause_to_play_animation);
      playButton.setImageDrawable(pauseToPlayDrawable);
      pauseToPlayDrawable.start();
    }
  }

  private class PlayClickedListener implements OnClickListener {
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    @Override
    public void onClick(View v) {
      try {
        Log.d(TAG, "playbutton onClick");
        if (audioSlidePlayer != null) {
          togglePlayToPause();
          // Resume from the current seek-bar position.
          audioSlidePlayer.play(getProgress());
        }
      } catch (IOException e) {
        Log.w(TAG, e);
      }
    }
  }

  private class PauseClickedListener implements OnClickListener {
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    @Override
    public void onClick(View v) {
      Log.d(TAG, "pausebutton onClick");
      if (audioSlidePlayer != null) {
        togglePauseToPlay();
        audioSlidePlayer.stop();
      }
    }
  }

  private class DownloadClickedListener implements OnClickListener {
    private final @NonNull AudioSlide slide;

    private DownloadClickedListener(@NonNull AudioSlide slide) {
      this.slide = slide;
    }

    @Override
    public void onClick(View v) {
      if (downloadListener != null) downloadListener.onClick(v, slide);
    }
  }

  private class SeekBarModifiedListener implements SeekBar.OnSeekBarChangeListener {
    @Override
    public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {}

    // Pause while scrubbing, resume from the new position on release.
    @Override
    public synchronized void onStartTrackingTouch(SeekBar seekBar) {
      if (audioSlidePlayer != null && pauseButton.getVisibility() == View.VISIBLE) {
        audioSlidePlayer.stop();
      }
    }

    @Override
    public synchronized void onStopTrackingTouch(SeekBar seekBar) {
      try {
        if (audioSlidePlayer != null && pauseButton.getVisibility() == View.VISIBLE) {
          audioSlidePlayer.play(getProgress());
        }
      } catch (IOException e) {
        Log.w(TAG, e);
      }
    }
  }

  private static class TouchIgnoringListener implements OnTouchListener {
    @Override
    public boolean onTouch(View v, MotionEvent event) {
      return true;
    }
  }

  @Subscribe(sticky = true, threadMode = ThreadMode.MAIN)
  public void onEventAsync(final PartProgressEvent event) {
    if (audioSlidePlayer != null && event.attachment.equals(audioSlidePlayer.getAudioSlide().asAttachment())) {
      downloadProgress.setInstantProgress(((float) event.progress) / event.total);
    }
  }
}

View File

@ -60,7 +60,7 @@ import org.thoughtcrime.securesms.MediaPreviewActivity;
import org.thoughtcrime.securesms.MessageDetailsActivity;
import org.thoughtcrime.securesms.attachments.DatabaseAttachment;
import org.thoughtcrime.securesms.components.AlertView;
import org.thoughtcrime.securesms.components.AudioView;
import org.thoughtcrime.securesms.loki.views.MessageAudioView;
import org.thoughtcrime.securesms.components.ConversationItemFooter;
import org.thoughtcrime.securesms.components.ConversationItemThumbnail;
import org.thoughtcrime.securesms.components.DocumentView;
@ -161,7 +161,7 @@ public class ConversationItem extends TapJackingProofLinearLayout
private @NonNull Set<MessageRecord> batchSelected = new HashSet<>();
private Recipient conversationRecipient;
private Stub<ConversationItemThumbnail> mediaThumbnailStub;
private Stub<AudioView> audioViewStub;
private Stub<MessageAudioView> audioViewStub;
private Stub<DocumentView> documentViewStub;
private Stub<SharedContactView> sharedContactStub;
private Stub<LinkPreviewView> linkPreviewStub;

View File

@ -0,0 +1,319 @@
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.loki.utilities.audio;
import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaDataSource;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Build;
import androidx.annotation.RequiresApi;
import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
/**
* Class partially extracted from Google's old Ringdroid project.
* https://github.com/google/ringdroid/blob/master/app/src/main/java/com/ringdroid/soundfile/SoundFile.java
* <p/>
* We need this one to parse audio files. Specifically extract RMS values for waveform visualization.
* <p/>
* NOTE: This class instance creation might be pretty slow (depends on the source audio file size).
* It's recommended to instantiate it in the background.
*/
/**
 * Decodes an audio source into raw 16-bit PCM kept entirely in memory and exposes basic
 * stream metadata (sample rate, channel count, per-channel sample count, average bit rate).
 * Decoding happens in the constructor, so instantiate on a background thread.
 */
public class DecodedAudio {
    // Member variables representing frame data
    private final long mFileSize;
    private final int mAvgBitRate;  // Average bit rate in kbps.
    private final int mSampleRate;
    private final int mChannels;
    private final int mNumSamples;  // total number of samples per channel in audio file
    private final ShortBuffer mDecodedSamples;  // shared buffer with mDecodedBytes.
    // mDecodedSamples has the following format:
    // {s1c1, s1c2, ..., s1cM, s2c1, ..., s2cM, ..., sNc1, ..., sNcM}
    // where sicj is the ith sample of the jth channel (a sample is a signed short)
    // M is the number of channels (e.g. 2 for stereo) and N is the number of samples per channel.

    // TODO(nfaralli): what is the real list of supported extensions? Is it device dependent?
    public static String[] getSupportedExtensions() {
        return new String[]{"mp3", "wav", "3gpp", "3gp", "amr", "aac", "m4a", "ogg"};
    }

    /** Returns true when the file name ends with one of {@link #getSupportedExtensions()}. */
    public static boolean isFilenameSupported(String filename) {
        String[] extensions = getSupportedExtensions();
        for (int i = 0; i < extensions.length; i++) {
            if (filename.endsWith("." + extensions[i])) {
                return true;
            }
        }
        return false;
    }

    /** Decodes {@code size} bytes of the file starting at {@code startOffset}. */
    public DecodedAudio(FileDescriptor fd, long startOffset, long size) throws IOException {
        this(createMediaExtractor(fd, startOffset, size), size);
    }

    @RequiresApi(api = Build.VERSION_CODES.M)
    public DecodedAudio(MediaDataSource dataSource) throws IOException {
        this(createMediaExtractor(dataSource), dataSource.getSize());
    }

    /**
     * Drives {@link MediaCodec} to decode the first audio track of {@code extractor}
     * into an in-memory little-endian PCM buffer.
     *
     * NOTE(review): on an exception mid-decode, {@code extractor} and {@code codec} are
     * not released (no try/finally) — confirm whether callers tolerate that leak.
     *
     * @param extractor positioned media source; its first "audio/" track is selected.
     * @param size      total byte size of the source, used for buffer-growth estimates
     *                  and the average bit rate.
     * @throws IOException if no audio track is found or the PCM encoding is not 16-bit.
     */
    public DecodedAudio(MediaExtractor extractor, long size) throws IOException {
        mFileSize = size;

        int numTracks = extractor.getTrackCount();
        // find and select the first audio track present in the file.
        MediaFormat format = null;
        int trackIndex;
        for (trackIndex = 0; trackIndex < numTracks; trackIndex++) {
            format = extractor.getTrackFormat(trackIndex);
            if (format.getString(MediaFormat.KEY_MIME).startsWith("audio/")) {
                extractor.selectTrack(trackIndex);
                break;
            }
        }
        // Loop ran off the end without selecting anything => no audio track.
        if (trackIndex == numTracks) {
            throw new IOException("No audio track found in the data source.");
        }

        mChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
        mSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);

        // Expected total number of samples per channel.
        int expectedNumSamples =
                (int) ((format.getLong(MediaFormat.KEY_DURATION) / 1000000.f) * mSampleRate + 0.5f);

        MediaCodec codec = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
        codec.configure(format, null, null, 0);
        codec.start();

        // Only 16-bit PCM output is supported by the buffer handling below.
        try {
            int pcmEncoding = codec.getOutputFormat().getInteger(MediaFormat.KEY_PCM_ENCODING);
            if (pcmEncoding != AudioFormat.ENCODING_PCM_16BIT) {
                throw new IOException("Unsupported PCM encoding code: " + pcmEncoding);
            }
        } catch (NullPointerException e) {
            // If KEY_PCM_ENCODING is not specified, means it's ENCODING_PCM_16BIT.
        }

        int decodedSamplesSize = 0;  // size of the output buffer containing decoded samples.
        byte[] decodedSamples = null;
        int sampleSize;
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        long presentationTime;
        int totalSizeRead = 0;
        boolean doneReading = false;

        // Set the size of the decoded samples buffer to 1MB (~6sec of a stereo stream at 44.1kHz).
        // For longer streams, the buffer size will be increased later on, calculating a rough
        // estimate of the total size needed to store all the samples in order to resize the buffer
        // only once.
        ByteBuffer decodedBytes = ByteBuffer.allocate(1 << 20);
        boolean firstSampleData = true;
        while (true) {
            // read data from file and feed it to the decoder input buffers.
            // 100µs timeout: keep draining outputs below even when no input buffer is free.
            int inputBufferIndex = codec.dequeueInputBuffer(100);
            if (!doneReading && inputBufferIndex >= 0) {
                sampleSize = extractor.readSampleData(codec.getInputBuffer(inputBufferIndex), 0);
                if (firstSampleData
                        && format.getString(MediaFormat.KEY_MIME).equals("audio/mp4a-latm")
                        && sampleSize == 2) {
                    // For some reasons on some devices (e.g. the Samsung S3) you should not
                    // provide the first two bytes of an AAC stream, otherwise the MediaCodec will
                    // crash. These two bytes do not contain music data but basic info on the
                    // stream (e.g. channel configuration and sampling frequency), and skipping them
                    // seems OK with other devices (MediaCodec has already been configured and
                    // already knows these parameters).
                    extractor.advance();
                    totalSizeRead += sampleSize;
                } else if (sampleSize < 0) {
                    // All samples have been read.
                    codec.queueInputBuffer(
                            inputBufferIndex, 0, 0, -1, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    doneReading = true;
                } else {
                    presentationTime = extractor.getSampleTime();
                    codec.queueInputBuffer(inputBufferIndex, 0, sampleSize, presentationTime, 0);
                    extractor.advance();
                    totalSizeRead += sampleSize;
                }
                firstSampleData = false;
            }

            // Get decoded stream from the decoder output buffers.
            int outputBufferIndex = codec.dequeueOutputBuffer(info, 100);
            if (outputBufferIndex >= 0 && info.size > 0) {
                // Grow the scratch array if this output chunk is the largest seen so far.
                if (decodedSamplesSize < info.size) {
                    decodedSamplesSize = info.size;
                    decodedSamples = new byte[decodedSamplesSize];
                }
                ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferIndex);
                outputBuffer.get(decodedSamples, 0, info.size);
                outputBuffer.clear();
                // Check if buffer is big enough. Resize it if it's too small.
                if (decodedBytes.remaining() < info.size) {
                    // Getting a rough estimate of the total size, allocate 20% more, and
                    // make sure to allocate at least 5MB more than the initial size.
                    int position = decodedBytes.position();
                    int newSize = (int) ((position * (1.0 * mFileSize / totalSizeRead)) * 1.2);
                    if (newSize - position < info.size + 5 * (1 << 20)) {
                        newSize = position + info.size + 5 * (1 << 20);
                    }
                    ByteBuffer newDecodedBytes = null;
                    // Try to allocate memory. If we are OOM, try to run the garbage collector.
                    int retry = 10;
                    while (retry > 0) {
                        try {
                            newDecodedBytes = ByteBuffer.allocate(newSize);
                            break;
                        } catch (OutOfMemoryError oome) {
                            // setting android:largeHeap="true" in <application> seem to help not
                            // reaching this section.
                            retry--;
                        }
                    }
                    if (retry == 0) {
                        // Failed to allocate memory... Stop reading more data and finalize the
                        // instance with the data decoded so far.
                        break;
                    }
                    //ByteBuffer newDecodedBytes = ByteBuffer.allocate(newSize);
                    decodedBytes.rewind();
                    newDecodedBytes.put(decodedBytes);
                    decodedBytes = newDecodedBytes;
                    decodedBytes.position(position);
                }
                decodedBytes.put(decodedSamples, 0, info.size);
                codec.releaseOutputBuffer(outputBufferIndex, false);
            } /*else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // Subsequent data will conform to new format.
                // We could check that codec.getOutputFormat(), which is the new output format,
                // is what we expect.
            }*/
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0
                    || (decodedBytes.position() / (2 * mChannels)) >= expectedNumSamples) {
                // We got all the decoded data from the decoder. Stop here.
                // Theoretically dequeueOutputBuffer(info, ...) should have set info.flags to
                // MediaCodec.BUFFER_FLAG_END_OF_STREAM. However some phones (e.g. Samsung S3)
                // won't do that for some files (e.g. with mono AAC files), in which case subsequent
                // calls to dequeueOutputBuffer may result in the application crashing, without
                // even an exception being thrown... Hence the second check.
                // (for mono AAC files, the S3 will actually double each sample, as if the stream
                // was stereo. The resulting stream is half what it's supposed to be and with a much
                // lower pitch.)
                break;
            }
        }
        mNumSamples = decodedBytes.position() / (mChannels * 2);  // One sample = 2 bytes.
        decodedBytes.rewind();
        decodedBytes.order(ByteOrder.LITTLE_ENDIAN);
        mDecodedSamples = decodedBytes.asShortBuffer();
        // NOTE(review): divides by mNumSamples — a zero-sample stream would throw
        // ArithmeticException here; confirm upstream guarantees non-empty audio.
        mAvgBitRate = (int) ((mFileSize * 8) * ((float) mSampleRate / mNumSamples) / 1000);

        extractor.release();
        codec.stop();
        codec.release();

        // // Temporary hack to make it work with the old version.
        // int numFrames = mNumSamples / getSamplesPerFrame();
        // if (mNumSamples % getSamplesPerFrame() != 0) {
        //     numFrames++;
        // }
        // mFrameGains = new int[numFrames];
        // mFrameLens = new int[numFrames];
        // mFrameOffsets = new int[numFrames];
        // int j;
        // int gain, value;
        // int frameLens = (int) ((1000 * mAvgBitRate / 8) *
        //         ((float) getSamplesPerFrame() / mSampleRate));
        // for (trackIndex = 0; trackIndex < numFrames; trackIndex++) {
        //     gain = -1;
        //     for (j = 0; j < getSamplesPerFrame(); j++) {
        //         value = 0;
        //         for (int k = 0; k < mChannels; k++) {
        //             if (mDecodedSamples.remaining() > 0) {
        //                 value += java.lang.Math.abs(mDecodedSamples.get());
        //             }
        //         }
        //         value /= mChannels;
        //         if (gain < value) {
        //             gain = value;
        //         }
        //     }
        //     mFrameGains[trackIndex] = (int) Math.sqrt(gain);  // here gain = sqrt(max value of 1st channel)...
        //     mFrameLens[trackIndex] = frameLens;  // totally not accurate...
        //     mFrameOffsets[trackIndex] = (int) (trackIndex * (1000 * mAvgBitRate / 8) *  // = i * frameLens
        //             ((float) getSamplesPerFrame() / mSampleRate));
        // }
        // mDecodedSamples.rewind();
        // mNumFrames = numFrames;
    }

    public long getFileSizeBytes() {
        return mFileSize;
    }

    public int getAvgBitrateKbps() {
        return mAvgBitRate;
    }

    public int getSampleRate() {
        return mSampleRate;
    }

    public int getChannels() {
        return mChannels;
    }

    public int getNumSamples() {
        return mNumSamples;  // Number of samples per channel.
    }

    /**
     * Returns the interleaved 16-bit PCM samples, read-only where the platform allows it.
     * May return the mutable internal buffer on Nougat (see workaround below).
     */
    public ShortBuffer getSamples() {
        if (mDecodedSamples != null) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N &&
                    Build.VERSION.SDK_INT <= Build.VERSION_CODES.N_MR1) {
                // Hack for Nougat where asReadOnlyBuffer fails to respect byte ordering.
                // See https://code.google.com/p/android/issues/detail?id=223824
                return mDecodedSamples;
            } else {
                return mDecodedSamples.asReadOnlyBuffer();
            }
        } else {
            return null;
        }
    }

    // Builds an extractor for a byte range of an open file descriptor.
    private static MediaExtractor createMediaExtractor(FileDescriptor fd, long startOffset, long size) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        extractor.setDataSource(fd, startOffset, size);
        return extractor;
    }

    // Builds an extractor for a custom data source (API 23+).
    @RequiresApi(api = Build.VERSION_CODES.M)
    private static MediaExtractor createMediaExtractor(MediaDataSource dataSource) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        extractor.setDataSource(dataSource);
        return extractor;
    }
}

View File

@ -0,0 +1,90 @@
package org.thoughtcrime.securesms.loki.utilities.audio;
import java.nio.ShortBuffer
import kotlin.math.ceil
import kotlin.math.sqrt
/**
* Computes audio RMS values for the first channel only.
*
* A typical RMS calculation algorithm is:
* 1. Square each sample
* 2. Sum the squared samples
* 3. Divide the sum of the squared samples by the number of samples
* 4. Take the square root of step 3., the mean of the squared samples
*
* @param maxFrames Defines the maximum number of output RMS frames.
* If the number of samples per channel is less than "maxFrames",
* the result array will match the per-channel sample count instead.
*
* @return Normalized RMS values float array.
*/
fun DecodedAudio.calculateRms(maxFrames: Int): FloatArray =
        calculateRms(samples, numSamples, channels, maxFrames)
/**
 * Computes one RMS value per output frame over the first channel of an interleaved
 * 16-bit sample buffer.
 *
 * @param samples    interleaved samples ({s1c1, s1c2, …}); only channel 0 is read.
 * @param numSamples number of samples PER CHANNEL (matches [DecodedAudio.getNumSamples];
 *                   the buffer holds numSamples * channels shorts).
 * @param channels   number of interleaved channels.
 * @param maxFrames  upper bound on the number of RMS frames produced.
 * @return RMS values normalized to the [0..1] range.
 */
private fun calculateRms(samples: ShortBuffer, numSamples: Int, channels: Int, maxFrames: Int): FloatArray {
    val numFrames: Int
    val frameStep: Float

    // FIX: `numSamples` is already a per-channel count, so it must not be divided by
    // `channels` again. The old `numSamples / channels` made `numFrames` smaller than
    // the frame indices generated below, overflowing `rmsValues` for multi-channel
    // audio (ArrayIndexOutOfBoundsException). Behavior for mono is unchanged.
    val samplesPerChannel = numSamples
    if (samplesPerChannel <= maxFrames) {
        frameStep = 1f
        numFrames = samplesPerChannel
    } else {
        frameStep = numSamples / maxFrames.toFloat()
        numFrames = maxFrames
    }

    val rmsValues = FloatArray(numFrames)

    var squaredFrameSum = 0.0
    var currentFrameIdx = 0

    // Finalizes the RMS of the current frame and advances to `nextFrameIdx`.
    fun calculateFrameRms(nextFrameIdx: Int) {
        rmsValues[currentFrameIdx] = sqrt(squaredFrameSum.toFloat())

        // Advance to the next frame.
        squaredFrameSum = 0.0
        currentFrameIdx = nextFrameIdx
    }

    // Step through the interleaved buffer, touching only channel 0 of each sample frame.
    (0 until numSamples * channels step channels).forEach { sampleIdx ->
        val channelSampleIdx = sampleIdx / channels
        val frameIdx = (channelSampleIdx / frameStep).toInt()

        if (currentFrameIdx != frameIdx) {
            // Calculate RMS value for the previous frame.
            calculateFrameRms(frameIdx)
        }

        // Divide each squared sample by the frame's sample count up front, so the
        // accumulated sum is already the mean of the squares.
        val samplesInCurrentFrame = ceil((currentFrameIdx + 1) * frameStep) - ceil(currentFrameIdx * frameStep)
        squaredFrameSum += (samples[sampleIdx] * samples[sampleIdx]) / samplesInCurrentFrame
    }

    // Calculate RMS value for the last frame.
    calculateFrameRms(-1)

    normalizeArray(rmsValues)

    return rmsValues
}
/**
* Normalizes the array's values to [0..1] range.
*/
/**
 * Rescales [values] in place so that they span the [0..1] range.
 * An array with zero span (all values equal) collapses to all zeros.
 */
private fun normalizeArray(values: FloatArray) {
    var minValue = +Float.MAX_VALUE
    var maxValue = -Float.MAX_VALUE

    for (value in values) {
        if (value < minValue) minValue = value
        if (value > maxValue) maxValue = value
    }

    val span = maxValue - minValue
    if (span == 0f) {
        for (i in values.indices) values[i] = 0f
    } else {
        for (i in values.indices) values[i] = (values[i] - minValue) / span
    }
}

View File

@ -1,10 +1,11 @@
package org.thoughtcrime.securesms.components
package org.thoughtcrime.securesms.loki.views
import android.content.Context
import android.content.res.ColorStateList
import android.graphics.Color
import android.graphics.PorterDuff
import android.graphics.drawable.AnimatedVectorDrawable
import android.media.MediaDataSource
import android.os.Build
import android.util.AttributeSet
import android.view.View
@ -12,29 +13,32 @@ import android.view.View.OnTouchListener
import android.view.ViewGroup
import android.widget.FrameLayout
import android.widget.ImageView
import android.widget.SeekBar
import android.widget.SeekBar.OnSeekBarChangeListener
import android.widget.TextView
import androidx.annotation.RequiresApi
import androidx.core.content.ContextCompat
import androidx.core.graphics.BlendModeColorFilterCompat.createBlendModeColorFilterCompat
import androidx.core.graphics.BlendModeCompat
import com.pnikosis.materialishprogress.ProgressWheel
import kotlinx.coroutines.*
import network.loki.messenger.R
import org.greenrobot.eventbus.EventBus
import org.greenrobot.eventbus.Subscribe
import org.greenrobot.eventbus.ThreadMode
import org.thoughtcrime.securesms.attachments.Attachment
import org.thoughtcrime.securesms.audio.AudioSlidePlayer
import org.thoughtcrime.securesms.components.AnimatingToggle
import org.thoughtcrime.securesms.database.AttachmentDatabase
import org.thoughtcrime.securesms.events.PartProgressEvent
import org.thoughtcrime.securesms.logging.Log
import org.thoughtcrime.securesms.loki.utilities.audio.DecodedAudio
import org.thoughtcrime.securesms.loki.utilities.audio.calculateRms
import org.thoughtcrime.securesms.mms.AudioSlide
import org.thoughtcrime.securesms.mms.PartAuthority
import org.thoughtcrime.securesms.mms.SlideClickListener
import java.io.IOException
import java.io.InputStream
import java.lang.Exception
import java.util.*
import java.util.concurrent.TimeUnit
import kotlin.math.floor
class AudioView: FrameLayout, AudioSlidePlayer.Listener {
class MessageAudioView: FrameLayout, AudioSlidePlayer.Listener {
companion object {
private const val TAG = "AudioViewKt"
@ -51,14 +55,17 @@ class AudioView: FrameLayout, AudioSlidePlayer.Listener {
private var downloadListener: SlideClickListener? = null
private var audioSlidePlayer: AudioSlidePlayer? = null
private var backwardsCounter = 0
// private var backwardsCounter = 0
/** Background coroutine scope that is available when the view is attached to a window. */
private var asyncCoroutineScope: CoroutineScope? = null
constructor(context: Context): this(context, null)
constructor(context: Context, attrs: AttributeSet?): this(context, attrs, 0)
constructor(context: Context, attrs: AttributeSet?, defStyleAttr: Int): super(context, attrs, defStyleAttr) {
View.inflate(context, R.layout.audio_view, this)
View.inflate(context, R.layout.message_audio_view, this)
container = findViewById(R.id.audio_widget_container)
controlToggle = findViewById(R.id.control_toggle)
playButton = findViewById(R.id.play)
@ -74,7 +81,7 @@ class AudioView: FrameLayout, AudioSlidePlayer.Listener {
if (audioSlidePlayer != null) {
togglePlayToPause()
// Restart the playback if progress bar is near at the end.
// Restart the playback if progress bar is nearly at the end.
val progress = if (seekBar.progress < 0.99f) seekBar.progress.toDouble() else 0.0
audioSlidePlayer!!.play(progress)
@ -99,8 +106,6 @@ class AudioView: FrameLayout, AudioSlidePlayer.Listener {
}
}
}
//TODO Remove this.
seekBar.sample = Random().let { (0 until 64).map { i -> it.nextFloat() }.toFloatArray() }
playButton.setImageDrawable(ContextCompat.getDrawable(context, R.drawable.play_icon))
pauseButton.setImageDrawable(ContextCompat.getDrawable(context, R.drawable.pause_icon))
@ -108,10 +113,10 @@ class AudioView: FrameLayout, AudioSlidePlayer.Listener {
pauseButton.background = ContextCompat.getDrawable(context, R.drawable.ic_circle_fill_white_48dp)
if (attrs != null) {
val typedArray = context.theme.obtainStyledAttributes(attrs, R.styleable.AudioView, 0, 0)
setTint(typedArray.getColor(R.styleable.AudioView_foregroundTintColor, Color.WHITE),
typedArray.getColor(R.styleable.AudioView_backgroundTintColor, Color.WHITE))
container.setBackgroundColor(typedArray.getColor(R.styleable.AudioView_widgetBackground, Color.TRANSPARENT))
val typedArray = context.theme.obtainStyledAttributes(attrs, R.styleable.MessageAudioView, 0, 0)
setTint(typedArray.getColor(R.styleable.MessageAudioView_foregroundTintColor, Color.WHITE),
typedArray.getColor(R.styleable.MessageAudioView_backgroundTintColor, Color.WHITE))
container.setBackgroundColor(typedArray.getColor(R.styleable.MessageAudioView_widgetBackground, Color.TRANSPARENT))
typedArray.recycle()
}
}
@ -119,30 +124,42 @@ class AudioView: FrameLayout, AudioSlidePlayer.Listener {
override fun onAttachedToWindow() {
super.onAttachedToWindow()
// Subscribe to attachment transfer progress events exactly once (delivered to onEventAsync).
if (!EventBus.getDefault().isRegistered(this)) EventBus.getDefault().register(this)
// (Re)create the background scope for audio decoding work; it is cancelled and
// nulled out again in onDetachedFromWindow, so it only exists while on screen.
asyncCoroutineScope = CoroutineScope(Job() + Dispatchers.IO)
}
override fun onDetachedFromWindow() {
    super.onDetachedFromWindow()
    EventBus.getDefault().unregister(this)
    // Cancel all the background operations.
    // Null-safe call: in rare view lifecycle edge cases onDetachedFromWindow can run
    // without a matching onAttachedToWindow having created the scope, and the old
    // `asyncCoroutineScope!!` would then crash with an NPE.
    asyncCoroutineScope?.cancel()
    asyncCoroutineScope = null
}
fun setAudio(audio: AudioSlide, showControls: Boolean) {
if (showControls && audio.isPendingDownload) {
controlToggle.displayQuick(downloadButton)
seekBar.isEnabled = false
downloadButton.setOnClickListener { v -> downloadListener?.onClick(v, audio) }
if (downloadProgress.isSpinning) {
downloadProgress.stopSpinning()
when {
showControls && audio.isPendingDownload -> {
controlToggle.displayQuick(downloadButton)
seekBar.isEnabled = false
downloadButton.setOnClickListener { v -> downloadListener?.onClick(v, audio) }
if (downloadProgress.isSpinning) {
downloadProgress.stopSpinning()
}
}
} else if (showControls && audio.transferState == AttachmentDatabase.TRANSFER_PROGRESS_STARTED) {
controlToggle.displayQuick(downloadProgress)
seekBar.isEnabled = false
downloadProgress.spin()
} else {
controlToggle.displayQuick(playButton)
seekBar.isEnabled = true
if (downloadProgress.isSpinning) {
downloadProgress.stopSpinning()
(showControls && audio.transferState == AttachmentDatabase.TRANSFER_PROGRESS_STARTED) -> {
controlToggle.displayQuick(downloadProgress)
seekBar.isEnabled = false
downloadProgress.spin()
}
else -> {
controlToggle.displayQuick(playButton)
seekBar.isEnabled = true
if (downloadProgress.isSpinning) {
downloadProgress.stopSpinning()
}
// Post to make sure it executes only when the view is attached to a window.
post(::updateSeekBarFromAudio)
}
}
audioSlidePlayer = AudioSlidePlayer.createFor(context, audio, this)
@ -246,27 +263,47 @@ class AudioView: FrameLayout, AudioSlidePlayer.Listener {
pauseToPlayDrawable.start()
}
// private inner class SeekBarModifiedListener : OnSeekBarChangeListener {
// override fun onProgressChanged(seekBar: SeekBar, progress: Int, fromUser: Boolean) {}
//
// @Synchronized
// override fun onStartTrackingTouch(seekBar: SeekBar) {
// if (audioSlidePlayer != null && pauseButton.visibility == View.VISIBLE) {
// audioSlidePlayer!!.stop()
// }
// }
//
// @Synchronized
// override fun onStopTrackingTouch(seekBar: SeekBar) {
// try {
// if (audioSlidePlayer != null && pauseButton.visibility == View.VISIBLE) {
// audioSlidePlayer!!.play(getProgress())
// }
// } catch (e: IOException) {
// Log.w(TAG, e)
// }
// }
// }
/**
 * Decodes the current audio attachment off the main thread and feeds the resulting
 * RMS (loudness) values to the waveform seek bar for visualization.
 *
 * Intended to be invoked while the view is attached to a window, since the work runs
 * on [asyncCoroutineScope] (created on attach, cancelled on detach). If decoding is
 * not possible — pre-M API level or a decoder failure — a deterministic pseudo-random
 * waveform is shown instead, seeded from the attachment so it is stable per file.
 */
private fun updateSeekBarFromAudio() {
    // Capture the player once to avoid a second (race-prone) !! dereference.
    val player = audioSlidePlayer ?: return
    val attachment = player.audioSlide.asAttachment()

    // Parse audio and compute RMS values for the WaveformSeekBar in the background.
    // Null-safe launch: this method is scheduled via post(), so the view may already
    // have been detached (scope == null) by the time the runnable executes.
    asyncCoroutineScope?.launch {
        val rmsFrames = 32 // The amount of values to be computed to supply for the visualization.

        // Derive a stable per-attachment seed so a fake waveform doesn't change across rebinds.
        fun extractAttachmentRandomSeed(attachment: Attachment): Int {
            return when {
                attachment.digest != null -> attachment.digest!!.sum()
                attachment.fileName != null -> attachment.fileName.hashCode()
                else -> attachment.hashCode()
            }
        }

        // Deterministic stand-in waveform for devices/files we cannot decode.
        fun generateFakeRms(seed: Int, frames: Int = rmsFrames): FloatArray {
            val random = Random(seed.toLong())
            return FloatArray(frames) { random.nextFloat() }
        }

        val rmsValues: FloatArray = if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
            // Due to API version incompatibility (MediaDataSource requires API 23),
            // we just display some random waveform for older API.
            generateFakeRms(extractAttachmentRandomSeed(attachment))
        } else {
            try {
                @Suppress("BlockingMethodInNonBlockingContext")
                PartAuthority.getAttachmentStream(context, attachment.dataUri!!).use {
                    DecodedAudio(InputStreamMediaDataSource(it)).calculateRms(rmsFrames)
                }
            } catch (e: Exception) {
                android.util.Log.w(TAG, "Failed to decode sample values for the audio attachment \"${attachment.fileName}\".", e)
                generateFakeRms(extractAttachmentRandomSeed(attachment))
            }
        }

        // Hop back to the UI thread before touching the view.
        post { seekBar.sample = rmsValues }
    }
}
@Subscribe(sticky = true, threadMode = ThreadMode.MAIN)
fun onEventAsync(event: PartProgressEvent) {
@ -274,4 +311,35 @@ class AudioView: FrameLayout, AudioSlidePlayer.Listener {
downloadProgress.setInstantProgress(event.progress.toFloat() / event.total)
}
}
}
/**
 * A [MediaDataSource] backed by an in-memory copy of the given [InputStream].
 *
 * The whole stream is read eagerly into a byte array at construction time, so this is
 * only suitable for reasonably small payloads (e.g. voice-message attachments).
 * The wrapped stream is NOT closed by [close]; the caller retains ownership of it.
 */
@RequiresApi(Build.VERSION_CODES.M)
private class InputStreamMediaDataSource(inputStream: InputStream) : MediaDataSource() {

    // Full contents of the source stream; every readAt() is served from this buffer.
    private val data: ByteArray = inputStream.readBytes()

    override fun readAt(position: Long, buffer: ByteArray, offset: Int, size: Int): Int {
        if (position >= data.size) {
            return -1 // -1 indicates EOF.
        }
        // Clamp the requested size so the copy never runs past the end of the data.
        val actualSize = minOf(size.toLong(), data.size - position).toInt()
        System.arraycopy(data, position.toInt(), buffer, offset, actualSize)
        return actualSize
    }

    override fun getSize(): Long = data.size.toLong()

    override fun close() {
        // Nothing to release: the source stream is not retained after construction.
    }
}

View File

@ -1,4 +1,4 @@
package org.thoughtcrime.securesms.components
package org.thoughtcrime.securesms.loki.views
import android.content.Context
import android.graphics.*
@ -81,20 +81,20 @@ class WaveformSeekBar : View {
}
var waveGap: Float =
dp(
context,
2f
)
dp(
context,
2f
)
set(value) {
field = value
invalidate()
}
var waveWidth: Float =
dp(
context,
5f
)
dp(
context,
5f
)
set(value) {
field = value
invalidate()
@ -107,17 +107,17 @@ class WaveformSeekBar : View {
}
var waveCornerRadius: Float =
dp(
context,
2.5f
)
dp(
context,
2.5f
)
set(value) {
field = value
invalidate()
}
var waveGravity: WaveGravity =
WaveGravity.CENTER
WaveGravity.CENTER
set(value) {
field = value
invalidate()
@ -137,10 +137,10 @@ class WaveformSeekBar : View {
private var canvasWidth = 0
private var canvasHeight = 0
private var maxValue =
dp(
context,
2f
)
dp(
context,
2f
)
private var touchDownX = 0f
private var scaledTouchSlop = ViewConfiguration.get(context).scaledTouchSlop
@ -171,9 +171,9 @@ class WaveformSeekBar : View {
typedAttrs.getColor(R.styleable.WaveformSeekBar_wave_progress_color, waveProgressColor)
progress = typedAttrs.getFloat(R.styleable.WaveformSeekBar_wave_progress, progress)
waveGravity =
WaveGravity.fromString(
typedAttrs.getString(R.styleable.WaveformSeekBar_wave_gravity)
)
WaveGravity.fromString(
typedAttrs.getString(R.styleable.WaveformSeekBar_wave_gravity)
)
typedAttrs.recycle()
}

View File

@ -41,7 +41,7 @@ import androidx.annotation.Nullable;
import org.thoughtcrime.securesms.MediaPreviewActivity;
import org.thoughtcrime.securesms.TransportOption;
import org.thoughtcrime.securesms.attachments.Attachment;
import org.thoughtcrime.securesms.components.AudioView;
import org.thoughtcrime.securesms.loki.views.MessageAudioView;
import org.thoughtcrime.securesms.components.DocumentView;
import org.thoughtcrime.securesms.components.RemovableEditableMediaView;
import org.thoughtcrime.securesms.components.ThumbnailView;
@ -91,7 +91,7 @@ public class AttachmentManager {
private RemovableEditableMediaView removableMediaView;
private ThumbnailView thumbnail;
private AudioView audioView;
private MessageAudioView audioView;
private DocumentView documentView;
private SignalMapView mapView;