Support for Signal calls.

Merge in RedPhone

// FREEBIE
This commit is contained in:
Moxie Marlinspike
2015-09-09 13:54:29 -07:00
parent 3d4ae60d81
commit d83a3d71bc
2585 changed files with 803492 additions and 45 deletions

125
jni/redphone/AudioCodec.cpp Normal file
View File

@@ -0,0 +1,125 @@
#include "AudioCodec.h"
#include <speex/speex.h>
//#include <speex/speex_preprocess.h>
#include <stdlib.h>
#include <stdio.h>
#include <stdarg.h>
#include <android/log.h>
#define TAG "AudioCodec"
#define ECHO_TAIL_MILLIS 75
// Zero all codec/echo-canceller handles; real setup happens in init(),
// which must succeed before encode()/decode()/conceal() are called.
AudioCodec::AudioCodec() : enc(NULL), dec(NULL), aecm(NULL), ns(NULL), initialized(0)
{ }
// One-time setup of the narrowband speex encoder/decoder and the WebRTC
// mobile echo canceller (AECM).  Returns 0 on success, -1 on any failure;
// after a partial failure the destructor can still run safely because it
// NULL-checks every handle.  The noise-suppression path is intentionally
// disabled (left commented out below).
int AudioCodec::init() {
  if ((enc = speex_encoder_init(speex_lib_get_mode(SPEEX_MODEID_NB))) == NULL) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Encoder failed to initialize!");
    return -1;
  }

  if ((dec = speex_decoder_init(speex_lib_get_mode(SPEEX_MODEID_NB))) == NULL) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Decoder failed to initialize!");
    return -1;
  }

  if (WebRtcAecm_Create(&aecm) != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "AECM failed to create!");
    return -1;
  }

  if (WebRtcAecm_Init(aecm, SPEEX_SAMPLE_RATE) != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "AECM failed to initialize!");
    return -1;
  }

  // Disabled noise-suppression setup, kept for reference:
//  if (WebRtcNsx_Create(&ns) != 0) {
//    __android_log_print(ANDROID_LOG_WARN, TAG, "NS failed to create!");
//    return -1;
//  }
//
//  if (WebRtcNsx_Init(ns, SPEEX_SAMPLE_RATE) != 0) {
//    __android_log_print(ANDROID_LOG_WARN, TAG, "NS failed to initialize!");
//    return -1;
//  }
//
//  if (WebRtcNsx_set_policy(ns, 0) != 0) { // "Mild"
//    __android_log_print(ANDROID_LOG_WARN, TAG, "NS policy failed!");
//    return -1;
//  }

  spx_int32_t config = 1;
  speex_decoder_ctl(dec, SPEEX_SET_ENH, &config);       // enable perceptual enhancement

  config = 0;
  speex_encoder_ctl(enc, SPEEX_SET_VBR, &config);       // constant bitrate

  config = 4;
  speex_encoder_ctl(enc, SPEEX_SET_QUALITY, &config);   // fixed mid quality setting

  config = 1;
  speex_encoder_ctl(enc, SPEEX_SET_COMPLEXITY, &config); // lowest CPU complexity

  // Cache the codec's frame sizes (samples per frame) for encode/decode math.
  speex_encoder_ctl(enc, SPEEX_GET_FRAME_SIZE, &enc_frame_size );
  speex_decoder_ctl(dec, SPEEX_GET_FRAME_SIZE, &dec_frame_size );

  __android_log_print(ANDROID_LOG_WARN, TAG, "Encoding frame size: %d", enc_frame_size);
  __android_log_print(ANDROID_LOG_WARN, TAG, "Decoding frame size: %d", dec_frame_size);

  speex_bits_init(&enc_bits);
  speex_bits_init(&dec_bits);

  // Only set once the bit buffers exist; the destructor keys off this flag.
  initialized = 1;

  return 0;
}
// Releases everything init() may have created.  The bit buffers only exist
// after a fully successful init() (guarded by 'initialized'); the handle
// NULL checks make this safe after a partially failed init().
// NOTE(review): 'ns' is never freed here, but the NS path in init() is
// commented out so it is also never allocated -- confirm if NS is re-enabled.
AudioCodec::~AudioCodec() {
  if (initialized) {
    speex_bits_destroy( &enc_bits );
    speex_bits_destroy( &dec_bits );
  }

  if (aecm != NULL) WebRtcAecm_Free(aecm);
  if (enc != NULL)  speex_encoder_destroy( enc );
  if (dec != NULL)  speex_decoder_destroy( dec );
}
// Runs one captured frame through the echo canceller and speex-encodes it.
// rawData must hold SPEEX_FRAME_SIZE samples; returns the number of bytes
// written into encodedData (bounded by maxEncodedDataLen).
// NOTE(review): the AECM return value is not checked -- on failure the
// contents of cleanData would be undefined; confirm acceptable.
int AudioCodec::encode(short *rawData, char* encodedData, int maxEncodedDataLen) {
//  short nonoiseData[SPEEX_FRAME_SIZE];
  short cleanData[SPEEX_FRAME_SIZE];

//  WebRtcNsx_Process(ns, rawData, NULL, nonoiseData, NULL);
//  WebRtcNsx_Process(ns, rawData+80, NULL, nonoiseData+80, NULL);
  // Echo-cancel against the far-end audio buffered in decode().
  WebRtcAecm_Process(aecm, rawData, NULL, cleanData, SPEEX_FRAME_SIZE, ECHO_TAIL_MILLIS);

  speex_bits_reset(&enc_bits);
  speex_encode_int(enc, (spx_int16_t *)cleanData, &enc_bits);
  return speex_bits_write(&enc_bits, encodedData, maxEncodedDataLen);
}
// Decodes every speex frame packed into encodedData, writing PCM into
// rawData and feeding each decoded frame to the AECM as far-end reference.
// Returns the total number of samples written.
// NOTE(review): rawData's capacity is not passed in, so a packet carrying
// more frames than the caller allocated for would overrun the buffer --
// matches the original author's "TODO bounds?" concern; callers must size
// rawData for the maximum expected frame count.
int AudioCodec::decode(char* encodedData, int encodedDataLen, short *rawData) {
  int rawDataOffset = 0;

  speex_bits_read_from(&dec_bits, encodedData, encodedDataLen);

  while (speex_decode_int(dec, &dec_bits, rawData + rawDataOffset) == 0) { // TODO bounds?
    // Buffer far-end (played) audio so encode() can cancel its echo.
    WebRtcAecm_BufferFarend(aecm, rawData + rawDataOffset, dec_frame_size);
    rawDataOffset += dec_frame_size;
  }

  return rawDataOffset;
}
// Packet-loss concealment: asks speex to synthesize 'frames' frames of
// audio (NULL bitstream = extrapolate from decoder state) into rawData.
// Returns the number of samples produced.
int AudioCodec::conceal(int frames, short *rawData) {
  for (int frame = 0; frame < frames; frame++) {
    short *destination = rawData + (frame * dec_frame_size);
    speex_decode_int(dec, NULL, destination);
  }

  return frames * dec_frame_size;
}

44
jni/redphone/AudioCodec.h Normal file
View File

@@ -0,0 +1,44 @@
#ifndef __AUDIO_CODEC_H__
#define __AUDIO_CODEC_H__
#include <sys/types.h>
#include <speex/speex.h>
#include <speex/speex_echo.h>
#include <modules/audio_processing/aecm/include/echo_control_mobile.h>
#include <modules/audio_processing/ns/include/noise_suppression_x.h>
#define SPEEX_BIT_RATE 8000
#define SPEEX_SAMPLE_RATE 8000
#define SPEEX_FRAME_RATE 50
#define SPEEX_FRAME_SIZE (SPEEX_SAMPLE_RATE / SPEEX_FRAME_RATE)
#define SPEEX_ENCODED_FRAME_SIZE 20
// Narrowband (8 kHz) speex encoder/decoder pair with WebRTC mobile echo
// cancellation (AECM) applied on the capture path.  Not thread-safe; the
// caller serializes encode/decode.  Noise suppression is declared but the
// setup code in AudioCodec.cpp is commented out.
class AudioCodec {

private:
  void *enc;                          // speex encoder state
  void *dec;                          // speex decoder state
  void *aecm;                         // WebRTC AECM instance
  NsxHandle *ns;                      // noise suppressor -- never created (NS path disabled)
  SpeexBits enc_bits, dec_bits;       // speex bit-packing buffers (valid only after init())
  SpeexEchoState *echo_state;         // NOTE(review): not referenced anywhere in AudioCodec.cpp -- likely vestigial
  int enc_frame_size, dec_frame_size; // samples per frame, queried from speex in init()
  int initialized;                    // set once init() fully succeeds

public:
  AudioCodec();
  ~AudioCodec();

  int init();
  // NOTE(review): the .cpp names the last parameter maxEncodedDataLen -- it
  // is the capacity of encodedData, not the amount of data present.
  int encode(short *rawData, char* encodedData, int encodedDataLen);
  int decode(char* encodedData, int encodedDataLen, short* rawData);
  int conceal(int frames, short *rawData);

};
#endif

View File

@@ -0,0 +1,133 @@
#include "AudioPlayer.h"
#include "EncodedAudioData.h"
#include <android/log.h>
#define TAG "AudioPlayer"
// Holds references to the shared jitter buffer and codec (not owned) and
// zeroes all OpenSL handles; the OpenSL objects are created in start().
AudioPlayer::AudioPlayer(WebRtcJitterBuffer &webRtcJitterBuffer, AudioCodec &audioCodec) :
  webRtcJitterBuffer(webRtcJitterBuffer), audioCodec(audioCodec),
  bqPlayerObject(NULL), bqPlayerPlay(NULL), outputMixObject(NULL), bqPlayerBufferQueue(NULL)
{
}
// Intentionally empty: OpenSL objects are torn down explicitly via stop(),
// not in the destructor.
AudioPlayer::~AudioPlayer() {
}
// OpenSL buffer-queue callback trampoline: recover the AudioPlayer from the
// opaque context pointer registered in start() and forward the call.
void AudioPlayer::playerCallback(SLAndroidSimpleBufferQueueItf bufferQueue, void *context) {
  static_cast<AudioPlayer*>(context)->playerCallback(bufferQueue);
}
// Called by OpenSL each time the previous buffer finishes playing: pull the
// next decoded audio from the jitter buffer and re-enqueue it.
// NOTE(review): if getAudio() ever returns 0 samples, this enqueues a
// zero-byte buffer -- confirm OpenSL keeps the callback chain alive in
// that case.
void AudioPlayer::playerCallback(SLAndroidSimpleBufferQueueItf bufferQueue) {
  int samples = webRtcJitterBuffer.getAudio(outputBuffer, FRAME_SIZE);
//  __android_log_print(ANDROID_LOG_WARN, TAG, "Jitter gave me: %d samples", samples);
  (*bufferQueue)->Enqueue(bufferQueue, outputBuffer, samples * sizeof(short));
}
// Builds the OpenSL ES playback chain (output mix -> audio player on the
// VOICE stream), registers the buffer-queue callback, primes one silent
// frame to kick off the callback cycle, and starts playback.
// Returns 0 on success, -1 on any OpenSL failure (partially created
// objects are released later by stop()).
int AudioPlayer::start(SLEngineItf *engineEnginePtr) {
  SLEngineItf engineEngine = *engineEnginePtr;

  // Source: a 2-slot buffer queue of 16-bit mono 8 kHz PCM.
  SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
  SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM, 1, SL_SAMPLINGRATE_8,
                                 SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
                                 SL_SPEAKER_FRONT_LEFT, SL_BYTEORDER_LITTLEENDIAN};
  SLDataSource audioSrc = {&loc_bufq, &format_pcm};

  const SLInterfaceID mixIds[] = {SL_IID_VOLUME};
  const SLboolean mixReq[] = {SL_BOOLEAN_FALSE};

  if ((*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, mixIds, mixReq) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "CreateOutputMix failed!");
    return -1;
  }

  if ((*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Realize OutputMix failed!");
    return -1;
  }

  // Sink: the output mix we just created.
  SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
  SLDataSink audioSnk = {&loc_outmix, NULL};

  // Request the Android config interface (to select the voice stream) and
  // the buffer-queue interface; both are mandatory.
  const SLInterfaceID ids[2] = {SL_IID_ANDROIDCONFIGURATION, SL_IID_BUFFERQUEUE};
  const SLboolean req[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};

  if ((*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk, 2, ids, req) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "CreateAudioPlayer failed!");
    return -1;
  }

  // Route output through the voice-call stream (earpiece volume keys etc.).
  // This must happen before Realize().
  SLAndroidConfigurationItf playerConfig;

  if ((*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_ANDROIDCONFIGURATION, &playerConfig) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Get AndroidConfiguration interface failed!");
    return -1;
  }

  SLint32 streamType = SL_ANDROID_STREAM_VOICE;

  if ((*playerConfig)->SetConfiguration(playerConfig, SL_ANDROID_KEY_STREAM_TYPE, &streamType, sizeof(SLint32)) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Setting SL_ANDROID_STREAM_VOICE failed!");
    return -1;
  }

  if ((*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Realize PlayerObject failed!");
    return -1;
  }

  if ((*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "GetInterface PlayerObject failed!");
    return -1;
  }

  if ((*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE, &bqPlayerBufferQueue) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "BufferQueue failed!");
    return -1;
  }

  if ((*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, &AudioPlayer::playerCallback, this) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "RegisterCallback failed!");
    return -1;
  }

  // Prime the queue with one frame of silence; its completion triggers the
  // first playerCallback(), which keeps the chain running from then on.
  memset(outputBuffer, 0, FRAME_SIZE * sizeof(short));

  if ((*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, outputBuffer, FRAME_SIZE * sizeof(short)) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Player enqueue failed!");
    return -1;
  }

  if ((*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Play state failed!");
    return -1;
  }

  return 0;
}
// Stops playback and destroys the OpenSL objects created by start().
// Safe to call after a partial start() failure or more than once: every
// handle is NULL-checked and NULLed after destruction.  Destroying
// bqPlayerObject invalidates its interfaces, so they are cleared with it.
void AudioPlayer::stop() {
  if (bqPlayerPlay != NULL) {
    (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_STOPPED);
  }

  if (bqPlayerBufferQueue != NULL) {
    (*bqPlayerBufferQueue)->Clear(bqPlayerBufferQueue);
  }

  if (bqPlayerObject != NULL) {
    (*bqPlayerObject)->Destroy(bqPlayerObject);
    bqPlayerPlay        = NULL;
    bqPlayerBufferQueue = NULL;
    bqPlayerObject      = NULL;
  }

  if (outputMixObject != NULL) {
    (*outputMixObject)->Destroy(outputMixObject);
    outputMixObject = NULL;
  }
}

View File

@@ -0,0 +1,45 @@
#ifndef __AUDIO_PLAYER_H__
#define __AUDIO_PLAYER_H__
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <SLES/OpenSLES_AndroidConfiguration.h>
#include "WebRtcJitterBuffer.h"
#include "AudioCodec.h"
#include "JitterBuffer.h"
#define SAMPLE_RATE 8000
#define FRAME_RATE 50
#define FRAME_SIZE SAMPLE_RATE / FRAME_RATE
// Plays decoded call audio through OpenSL ES.  Pulls 20 ms frames from the
// jitter buffer in the buffer-queue callback.  Lifecycle: construct, then
// start() with a realized OpenSL engine, then stop() before destruction.
class AudioPlayer {

private:
//  JitterBuffer &jitterBuffer;
  WebRtcJitterBuffer &webRtcJitterBuffer;  // source of decoded audio (not owned)
  AudioCodec &audioCodec;                  // NOTE(review): held but not used by AudioPlayer.cpp -- confirm needed
//  int sampleRate;
//  int bufferFrames;

  // OpenSL playback chain; all created in start(), destroyed in stop().
  SLObjectItf bqPlayerObject;
  SLPlayItf bqPlayerPlay;
  SLObjectItf outputMixObject;
  SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;

  // Single frame handed to OpenSL; refilled in the callback.
  short outputBuffer[FRAME_SIZE];

public:
  AudioPlayer(WebRtcJitterBuffer &jitterBuffer, AudioCodec &audioCodec);
  ~AudioPlayer();

  int start(SLEngineItf *engineEngine);
  void stop();

  // Static trampoline registered with OpenSL; forwards to the member version.
  static void playerCallback(SLAndroidSimpleBufferQueueItf bufferQueue, void *context);
  void playerCallback(SLAndroidSimpleBufferQueueItf bufferQueue);
};
#endif

View File

@@ -0,0 +1,294 @@
#include "AudioCodec.h"
#include "MicrophoneReader.h"
#include "SequenceCounter.h"
#include "JitterBuffer.h"
#include "RtpAudioReceiver.h"
#include "RtpAudioSender.h"
#include "AudioPlayer.h"
#include "NetworkUtil.h"
#include "CallAudioManager.h"
#include <string.h>
#include <stdint.h>
#include <unistd.h>
#include <jni.h>
#include <android/log.h>
#define TAG "CallAudioManager"
// Wires together the full audio pipeline.  Member-initializer order follows
// the declaration order in the header; webRtcJitterBuffer and
// microphoneReader are built on top of the already-constructed audioCodec
// and audioSender.  Takes ownership of sockAddr (freed in the destructor).
// OpenSL engine objects stay NULL until start().
CallAudioManager::CallAudioManager(int androidSdkVersion, int socketFd,
                                   struct sockaddr *sockAddr, int sockAddrLen,
                                   SrtpStreamParameters *senderParameters, SrtpStreamParameters *receiverParameters)
  : running(0), finished(1), engineObject(NULL), engineEngine(NULL), audioCodec(),
    audioSender(socketFd, sockAddr, sockAddrLen, senderParameters),
    audioReceiver(socketFd, receiverParameters),
    webRtcJitterBuffer(audioCodec), clock(),
    microphoneReader(androidSdkVersion, audioCodec, audioSender, clock),
    audioPlayer(webRtcJitterBuffer, audioCodec),
    sockAddr(sockAddr)
{
}
// Creates the mutex/condition pair used for the start()/stop() shutdown
// handshake.  Returns 0 on success, -1 on failure.  If the condition
// variable cannot be created, the already-created mutex is destroyed so
// nothing leaks (the original code leaked it on that path).
int CallAudioManager::init() {
  if (pthread_mutex_init(&mutex, NULL) != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Failed to create mutex!");
    return -1;
  }

  if (pthread_cond_init(&condition, NULL) != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Failed to create condition!");
    pthread_mutex_destroy(&mutex);  // don't leak the mutex on partial init
    return -1;
  }

  return 0;
}
// Stops every pipeline component, then frees sockAddr (malloc'd by
// constructSockAddr -- ownership was taken in the constructor) and the
// OpenSL engine created in start().
// NOTE(review): the SrtpStreamParameters passed to the constructor are not
// freed here -- confirm whether the sender/receiver take ownership, else
// they leak.
CallAudioManager::~CallAudioManager() {
  __android_log_print(ANDROID_LOG_WARN, TAG, "Shutting down...");
  microphoneReader.stop();
  audioPlayer.stop();
  webRtcJitterBuffer.stop();

  if (sockAddr != NULL) {
    free(sockAddr);
  }

  if (engineObject != NULL) {
    (*engineObject)->Destroy(engineObject);
  }

  __android_log_print(ANDROID_LOG_WARN, TAG, "Shutdown complete....");
}
// Blocking call-audio main loop.  Creates the OpenSL engine, initializes
// every component, starts capture and playback, then receives RTP packets
// until stop() clears 'running'.  Returns 0 on normal exit, -1 on any
// initialization failure.  On exit it signals 'finished' under the mutex
// so stop() can return.
// NOTE(review): when 'running' is cleared this thread only notices after
// the blocking recv() in audioReceiver.receive() returns -- confirm the
// socket is closed/shutdown elsewhere to unblock it.
int CallAudioManager::start() {
  running  = 1;
  finished = 0;

  // --- OpenSL engine bring-up ---
  if (slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Failed to create engineObject!");
    return -1;
  }

  if ((*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Failed to realize engineObject!");
    return -1;
  }

  if ((*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine) != SL_RESULT_SUCCESS) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Failed to get engine interface!");
    return -1;
  }

  // --- Pipeline component initialization ---
  if (audioCodec.init() != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Failed to initialize codec!");
    return -1;
  }

  if (audioSender.init() != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Failed to initialize RTP sender!");
    return -1;
  }

  if (audioReceiver.init() != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Failed to initialize RTP receiver!");
    return -1;
  }

  if (webRtcJitterBuffer.init() != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Failed to initialize jitter buffer!");
    return -1;
  }

  __android_log_print(ANDROID_LOG_WARN, TAG, "Starting MicrophoneReader...");
  if (microphoneReader.start(&engineEngine) == -1) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "ERROR -- MicrophoneReader::start() returned -1!");
    return -1;
  }

  __android_log_print(ANDROID_LOG_WARN, TAG, "Starting AudioPlayer...");
  if (audioPlayer.start(&engineEngine) == -1) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "AudioPlayer::start() returned -1!");
    return -1;
  }

  // --- Receive loop: feed incoming RTP into the jitter buffer ---
  char buffer[4096];

  while(running) {
    RtpPacket *packet = audioReceiver.receive(buffer, sizeof(buffer));

    if (packet != NULL) {
      // Some senders omit timestamps; synthesize one from bytes received.
      if (packet->getTimestamp() == 0) {
        packet->setTimestamp(clock.getImprovisedTimestamp(packet->getPayloadLen()));
      }

      webRtcJitterBuffer.addAudio(packet, clock.getTickCount());
      delete packet;
    }
  }

  // Handshake with stop(): mark finished and wake the waiter.
  if (pthread_mutex_lock(&mutex) != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Failed to acquire mutex!");
    return 0;
  }

  finished = 1;
  pthread_cond_signal(&condition);
  pthread_mutex_unlock(&mutex);

  return 0;
}
// Requests shutdown from another thread: clears 'running', stops the
// capture/playback components, then waits on the condition variable until
// start()'s loop confirms it has exited.  The trailing sleep lets one last
// in-flight microphone callback complete before the caller tears down.
void CallAudioManager::stop() {
  running = 0;

  microphoneReader.stop();
  audioPlayer.stop();
  webRtcJitterBuffer.stop();

  pthread_mutex_lock(&mutex);
  while (finished == 0) {
    pthread_cond_wait(&condition, &mutex);
  }
  pthread_mutex_unlock(&mutex);

  usleep(40000); // Duration of microphone frame.
}
void CallAudioManager::setMute(int muteEnabled) {
microphoneReader.setMute(muteEnabled);
}
// Converts a Java server-IP string + port into a freshly malloc'd sockaddr
// (IPv4 or IPv6).  On success *result owns the allocation; on parse failure
// *result is set to NULL.  Callers must test *result, not *resultLen:
// NOTE(review): on an invalid address *resultLen is still set to the
// family's sockaddr size -- harmless today because all callers check
// *result first, but worth confirming.
static void constructSockAddr(JNIEnv *env, jstring serverIpString, jint serverPort,
                              struct sockaddr** result, int *resultLen)
{
  const char* serverIp = env->GetStringUTFChars(serverIpString, 0);
  int addressType = NetworkUtil::getAddressType(serverIp);

  if (addressType == 1) {          // IPv4
    struct sockaddr_in *sockAddr = (struct sockaddr_in*)malloc(sizeof(struct sockaddr_in));
    memset(sockAddr, 0, sizeof(struct sockaddr_in));

    sockAddr->sin_family = AF_INET;
    sockAddr->sin_port   = htons(serverPort);

    if (inet_aton(serverIp, &(sockAddr->sin_addr)) == 0) {
      __android_log_print(ANDROID_LOG_WARN, TAG, "Invalid address: %s", serverIp);
      free(sockAddr);
      sockAddr = NULL;
    }

    *result    = (struct sockaddr*)sockAddr;
    *resultLen = sizeof(struct sockaddr_in);
  } else if (addressType == 0) {   // IPv6
    struct sockaddr_in6 *sockAddr = (struct sockaddr_in6*)malloc(sizeof(struct sockaddr_in6));
    memset(sockAddr, 0, sizeof(struct sockaddr_in6));

    sockAddr->sin6_family = AF_INET6;
    sockAddr->sin6_port   = htons(serverPort);

    if (inet_pton(AF_INET6, serverIp, &(sockAddr->sin6_addr)) != 1) {
      __android_log_print(ANDROID_LOG_WARN, TAG, "Invalid IPv6 address: %s", serverIp);
      free(sockAddr);
      sockAddr = NULL;
    }

    *result    = (struct sockaddr*)sockAddr;
    *resultLen = sizeof(struct sockaddr_in6);
  } else {                         // getAddressType() failed
    __android_log_print(ANDROID_LOG_WARN, TAG, "Unknown address type: %d", addressType);
    *result    = NULL;
    *resultLen = 0;
  }

  env->ReleaseStringUTFChars(serverIpString, serverIp);
}
// Builds SrtpStreamParameters from Java byte arrays.  The arrays are pinned
// only for the duration of the constructor call and released immediately
// after.
// NOTE(review): this is only safe if SrtpStreamParameters COPIES the key
// material in its constructor; if it stores the pointers they dangle after
// ReleaseByteArrayElements -- confirm against SrtpStream.h.
static SrtpStreamParameters* constructSrtpStreamParameters(JNIEnv *env, jbyteArray cipherKey, jbyteArray macKey, jbyteArray salt) {
  uint8_t* cipherKeyBytes = (uint8_t*)env->GetByteArrayElements(cipherKey, 0);
  uint8_t* macKeyBytes    = (uint8_t*)env->GetByteArrayElements(macKey, 0);
  uint8_t* saltBytes      = (uint8_t*)env->GetByteArrayElements(salt, 0);

  SrtpStreamParameters *parameters = new SrtpStreamParameters(cipherKeyBytes, macKeyBytes, saltBytes);

  env->ReleaseByteArrayElements(cipherKey, (jbyte*)cipherKeyBytes, 0);
  env->ReleaseByteArrayElements(macKey, (jbyte*)macKeyBytes, 0);
  env->ReleaseByteArrayElements(salt, (jbyte*)saltBytes, 0);

  return parameters;
}
// JNI entry point: constructs the native CallAudioManager and returns it to
// Java as an opaque jlong handle.  Returns -1 and throws
// NativeAudioException on failure.
// NOTE(review): on the init-failure path, delete manager frees sockAddr
// (the destructor owns it) but senderParameters/receiverParameters appear
// to leak -- confirm SrtpStreamParameters ownership.
jlong JNICALL Java_org_thoughtcrime_redphone_audio_CallAudioManager_create
  (JNIEnv *env, jobject obj, jint androidSdkVersion,
   jint socketFd, jstring serverIpString, jint serverPort,
   jbyteArray senderCipherKey, jbyteArray senderMacKey, jbyteArray senderSalt,
   jbyteArray receiverCipherKey, jbyteArray receiverMacKey, jbyteArray receiverSalt)
{
  struct sockaddr *sockAddr;
  int sockAddrLen;

  constructSockAddr(env, serverIpString, serverPort, &sockAddr, &sockAddrLen);

  if (sockAddr == NULL) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Failed to construct sockAddr!");
    env->ThrowNew(env->FindClass("org/thoughtcrime/redphone/audio/NativeAudioException"),
                  "Failed to initialize native audio");
    return -1;
  }

  SrtpStreamParameters *senderParameters   = constructSrtpStreamParameters(env, senderCipherKey, senderMacKey, senderSalt);
  SrtpStreamParameters *receiverParameters = constructSrtpStreamParameters(env, receiverCipherKey, receiverMacKey, receiverSalt);

  // Manager takes ownership of sockAddr.
  CallAudioManager *manager = new CallAudioManager(androidSdkVersion, socketFd, sockAddr, sockAddrLen,
                                                   senderParameters, receiverParameters);

  if (manager->init() != 0) {
    delete manager;
    env->ThrowNew(env->FindClass("org/thoughtcrime/redphone/audio/NativeAudioException"),
                  "Failed to initialize native audio");
    return -1;
  }

  return (jlong)manager;
}
// JNI entry point: runs the blocking audio loop on the calling Java thread.
// Throws NativeAudioException if the native start() reports failure.
void JNICALL Java_org_thoughtcrime_redphone_audio_CallAudioManager_start
  (JNIEnv *env, jobject obj, jlong handle)
{
  CallAudioManager *manager = reinterpret_cast<CallAudioManager*>(handle);

  if (manager->start() == -1) {
    jclass exceptionClass = env->FindClass("org/thoughtcrime/redphone/audio/NativeAudioException");
    env->ThrowNew(exceptionClass, "Failed to start native audio");
  }
}
// JNI entry point: forwards the mute flag to the native manager.
void JNICALL Java_org_thoughtcrime_redphone_audio_CallAudioManager_setMute
  (JNIEnv *env, jobject obj, jlong handle, jboolean muteEnabled)
{
  reinterpret_cast<CallAudioManager*>(handle)->setMute(muteEnabled);
}
// JNI entry point: asks the native manager to shut down; blocks until the
// native audio loop has confirmed exit.
void JNICALL Java_org_thoughtcrime_redphone_audio_CallAudioManager_stop
  (JNIEnv *env, jobject obj, jlong handle)
{
  reinterpret_cast<CallAudioManager*>(handle)->stop();
}
// JNI entry point: destroys the native manager created by _create.  The
// handle must not be used again afterwards.
void JNICALL Java_org_thoughtcrime_redphone_audio_CallAudioManager_dispose
  (JNIEnv *env, jobject obj, jlong handle)
{
  CallAudioManager *doomed = reinterpret_cast<CallAudioManager*>(handle);
  delete doomed;
}

View File

@@ -0,0 +1,71 @@
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class org_thoughtcrime_redphone_audio_CallAudioManager2 */
#ifndef _Included_org_thoughtcrime_redphone_audio_CallAudioManager2
#define _Included_org_thoughtcrime_redphone_audio_CallAudioManager2
#ifdef __cplusplus
#include <pthread.h>
#include "SrtpStream.h"
#include "Clock.h"
// Owns the whole native call-audio pipeline: codec, SRTP send/receive,
// jitter buffer, microphone capture and playback.  start() blocks on the
// calling thread; stop() is called from another thread and handshakes via
// condition/mutex.
class CallAudioManager {

private:
  volatile int running;               // cleared by stop() to end start()'s loop
  int finished;                       // set by start() on exit, guarded by mutex

  SLObjectItf engineObject;           // OpenSL engine (created in start())
  SLEngineItf engineEngine;

  AudioCodec audioCodec;
  RtpAudioSender audioSender;
  RtpAudioReceiver audioReceiver;
  WebRtcJitterBuffer webRtcJitterBuffer;
  Clock clock;                        // shared send/receive timestamp clock
  MicrophoneReader microphoneReader;
  AudioPlayer audioPlayer;
  struct sockaddr *sockAddr;          // owned; freed in the destructor

  // Shutdown handshake between start() and stop().
  pthread_cond_t  condition;
  pthread_mutex_t mutex;

public:
  CallAudioManager(int androidSdkVersion, int socketFd, struct sockaddr *sockAddr, int sockAddrLen,
                   SrtpStreamParameters *senderParameters, SrtpStreamParameters *receiverParameters);

  ~CallAudioManager();

  int init();
  int start();
  void stop();
  void setMute(int muteEnabled);
  // NOTE(review): no definition for isFinished() is visible in the portion
  // of CallAudioManager.cpp shown -- confirm it exists or remove.
  int isFinished();
};
extern "C" {
#endif
/*
* Class: org_thoughtcrime_redphone_audio_CallAudioManager
* Method: create
*/
JNIEXPORT jlong JNICALL Java_org_thoughtcrime_redphone_audio_CallAudioManager_create
(JNIEnv *, jobject, jint, jint, jstring, jint,
jbyteArray, jbyteArray, jbyteArray, jbyteArray, jbyteArray, jbyteArray);
JNIEXPORT void JNICALL Java_org_thoughtcrime_redphone_audio_CallAudioManager_start
(JNIEnv *, jobject, jlong);
JNIEXPORT void JNICALL Java_org_thoughtcrime_redphone_audio_CallAudioManager_stop
(JNIEnv *, jobject, jlong);
JNIEXPORT void JNICALL Java_org_thoughtcrime_redphone_audio_CallAudioManager_setMute
(JNIEnv *, jobject, jlong, jboolean);
JNIEXPORT void JNICALL Java_org_thoughtcrime_redphone_audio_CallAudioManager_dispose
(JNIEnv *, jobject, jlong);
#ifdef __cplusplus
}
#endif
#endif

34
jni/redphone/Clock.h Normal file
View File

@@ -0,0 +1,34 @@
#ifndef __CLOCK_H__
#define __CLOCK_H__
#include "AudioCodec.h"
#include <android/log.h>
// Sample-count clock for RTP timestamps.  tick() advances by whole speex
// frames on the capture path; getImprovisedTimestamp() synthesizes a
// timestamp for peers that send none, from cumulative bytes received.
class Clock {

private:
  // NOTE(review): volatile does not make '+=' atomic; tick() and
  // getTickCount() appear to run on different threads -- a torn or stale
  // read is possible in principle.  Confirm whether that matters here.
  volatile uint32_t tickCount;   // total samples produced so far
  uint32_t dataReceived;         // total encoded bytes seen by getImprovisedTimestamp()

public:
  Clock() : tickCount(0), dataReceived(0) {}

  // Advance by 'frames' speex frames; returns the new sample count.
  uint32_t tick(int frames) {
    tickCount += (frames * SPEEX_FRAME_SIZE);
    return tickCount;
  }

  uint32_t getTickCount() {
    return tickCount;
  }

  // Estimate a sample timestamp assuming fixed-size encoded frames.
  uint32_t getImprovisedTimestamp(int dataLen) {
    dataReceived += dataLen;
    return (dataReceived / SPEEX_ENCODED_FRAME_SIZE) * SPEEX_FRAME_SIZE;
  }
};
#endif

View File

@@ -0,0 +1,40 @@
#ifndef __ENCODED_AUDIO_DATA_H__
#define __ENCODED_AUDIO_DATA_H__
#include <sys/types.h>
#include <string.h>
#include <stdlib.h>
// Owning copy of one encoded audio frame, tagged with its sequence number
// so a jitter buffer can order frames.  Non-copyable: the class owns a
// single malloc'd buffer and an implicit copy would cause a double free()
// in the destructor (the original class violated the Rule of Three).
class EncodedAudioData {

private:
  char    *data;      // heap copy of the encoded bytes (owned)
  int      dataLen;   // length of data in bytes
  int64_t  sequence;  // monotonic ordering key

  // Declared but never defined: C++03-style "deleted" copy operations.
  EncodedAudioData(const EncodedAudioData&);
  EncodedAudioData& operator=(const EncodedAudioData&);

public:
  // Takes a private copy of encoded[0..encodedLen); the caller keeps
  // ownership of its own buffer.
  EncodedAudioData(char* encoded, int encodedLen, int64_t sequence) :
    data(NULL), dataLen(encodedLen), sequence(sequence)
  {
    data = (char*)malloc(encodedLen);

    if (data != NULL) {
      memcpy(data, encoded, encodedLen);
    } else {
      dataLen = 0;  // allocation failed; present an empty frame rather than crash
    }
  }

  ~EncodedAudioData() {
    free(data);
  }

  int64_t getSequence() {
    return sequence;
  }

  char* getData() {
    return data;
  }

  int getDataLen() {
    return dataLen;
  }
};
#endif

View File

@@ -0,0 +1,34 @@
#include "JitterBuffer.h"
#include <android/log.h>
#define TAG "JitterBuffer"
// Creates the empty priority queue and its guarding mutex.
// NOTE(review): pthread_mutex_init's return value is ignored, and the class
// declares no destructor -- the mutex is never destroyed and any
// EncodedAudioData still queued at teardown leaks.  Confirm intentional.
JitterBuffer::JitterBuffer() :
  pendingAudio()
{
  pthread_mutex_init(&lock, NULL);
}
// Copies the encoded frame and queues it ordered by sequence number
// (CompareSequence makes the queue a min-heap on sequence).  Thread-safe.
void JitterBuffer::addAudio(int64_t sequence, char* encodedData, int encodedDataLen) {
  EncodedAudioData *encodedAudioData = new EncodedAudioData(encodedData, encodedDataLen, sequence);

  pthread_mutex_lock(&lock);
  pendingAudio.push(encodedAudioData);
  // size() returns size_t; cast explicitly -- passing a size_t for "%d" is
  // undefined behavior on LP64 targets.
  __android_log_print(ANDROID_LOG_WARN, TAG, "Queue Size: %d", (int)pendingAudio.size());
  pthread_mutex_unlock(&lock);
}
// Pops the lowest-sequence (oldest) frame, or returns NULL when the queue
// is empty.  The caller takes ownership of the returned object.
EncodedAudioData* JitterBuffer::getAudio() {
  pthread_mutex_lock(&lock);

  EncodedAudioData *frame = NULL;

  if (!pendingAudio.empty()) {
    frame = pendingAudio.top();
    pendingAudio.pop();
  }

  pthread_mutex_unlock(&lock);
  return frame;
}

View File

@@ -0,0 +1,34 @@
#ifndef __JITTER_BUFFER_H__
#define __JITTER_BUFFER_H__
#include <iostream>
#include <queue>
#include <iomanip>
#include <pthread.h>
#include "EncodedAudioData.h"
// Orders EncodedAudioData* by sequence number for std::priority_queue.
// Using '>' inverts the default max-heap so top() yields the LOWEST
// (oldest) sequence number first.
class CompareSequence {
public:
  bool operator()(EncodedAudioData *lh, EncodedAudioData *rh)
  {
    return lh->getSequence() > rh->getSequence();
  }
};
// Mutex-protected priority queue of encoded audio frames, ordered so the
// oldest sequence number is dequeued first.
// NOTE(review): no destructor is declared -- queued EncodedAudioData and
// the mutex are never released; confirm whether this type is still used
// (CallAudioManager uses WebRtcJitterBuffer instead).
class JitterBuffer {

private:
  pthread_mutex_t lock;  // guards pendingAudio
  std::priority_queue<EncodedAudioData*, std::vector<EncodedAudioData*>, CompareSequence> pendingAudio;

public:
  JitterBuffer();
  // Copies the frame; the queue owns its copy until getAudio() hands it out.
  void addAudio(int64_t sequence, char* encodedAudio, int encodedAudioLen);
  // Returns the oldest frame (caller owns it) or NULL when empty.
  EncodedAudioData* getAudio();
};
#endif

View File

@@ -0,0 +1,124 @@
#include "MicrophoneReader.h"
#include "SampleRateUtil.h"
#include <jni.h>
#include <android/log.h>
#define TAG "MicrophoneReader"
#ifndef SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION
#define SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION ((SLuint32) 0x00000004)
#endif
// Stores references to the codec, RTP sender and clock (not owned) and
// zeroes the OpenSL handles; the recorder itself is created in start().
// androidSdkVersion selects the recording preset used in start().
MicrophoneReader::MicrophoneReader(int androidSdkVersion, AudioCodec &audioCodec, RtpAudioSender &rtpAudioSender, Clock &clock) :
  androidSdkVersion(androidSdkVersion), muteEnabled(0),
  audioCodec(audioCodec), rtpAudioSender(rtpAudioSender), clock(clock),
  recorderObject(NULL), recorderRecord(NULL), recorderBufferQueue(NULL)
{
}
// Intentionally empty: OpenSL objects are torn down explicitly via stop().
MicrophoneReader::~MicrophoneReader() {
}
// OpenSL recorder callback trampoline: recover the MicrophoneReader from
// the opaque context pointer registered in start() and forward the call.
void MicrophoneReader::recorderCallback(SLAndroidSimpleBufferQueueItf bufferQueue, void *context) {
  static_cast<MicrophoneReader*>(context)->recorderCallback(bufferQueue);
}
// Called by OpenSL when a capture buffer (two 20 ms frames) fills.  When
// muted, the captured audio is overwritten with silence before encoding so
// the stream keeps flowing.  Both frames are encoded back-to-back into one
// payload, sent with a clock advanced by two frames, then the same buffer
// is handed back to the recorder.
// NOTE(review): encode()'s return values are not checked for errors before
// being used as offsets -- confirm speex_bits_write cannot return < 0 here.
void MicrophoneReader::recorderCallback(SLAndroidSimpleBufferQueueItf bufferQueue)
{
  if (muteEnabled) {
    memset(inputBuffer, 0, FRAME_SIZE * 2 * sizeof(short));
  }

  int encodedAudioLen = audioCodec.encode(inputBuffer, encodedAudio, sizeof(encodedAudio));
  encodedAudioLen    += audioCodec.encode(inputBuffer + FRAME_SIZE, encodedAudio + encodedAudioLen, sizeof(encodedAudio) - encodedAudioLen);

  rtpAudioSender.send(clock.tick(2), encodedAudio, encodedAudioLen);

  (*bufferQueue)->Enqueue(bufferQueue, inputBuffer, FRAME_SIZE * 2 * sizeof(short));
}
void MicrophoneReader::setMute(int muteEnabled) {
this->muteEnabled = muteEnabled;
}
// Builds the OpenSL ES capture chain (mic -> buffer queue of 16-bit mono
// 8 kHz PCM), selects the voice-communication preset on API >= 14, primes
// one capture buffer, and starts recording.  Returns 0 on success, -1 on
// failure (partially created objects are released later by stop()).
int MicrophoneReader::start(SLEngineItf *engineEnginePtr) {
  SLEngineItf engineEngine = *engineEnginePtr;

  // Sink: a single-slot buffer queue of 16-bit mono 8 kHz PCM.
  SLDataLocator_AndroidSimpleBufferQueue loc_bq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 1};
  SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM, 1, SL_SAMPLINGRATE_8,
                                 SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
                                 SL_SPEAKER_FRONT_CENTER, SL_BYTEORDER_LITTLEENDIAN};

  // Source: the default audio input device.
  SLDataLocator_IODevice loc_dev = {SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT, SL_DEFAULTDEVICEID_AUDIOINPUT, NULL};
  SLDataSource audioSrc = {&loc_dev, NULL};
  SLDataSink audioSnk = {&loc_bq, &format_pcm};

  const SLInterfaceID id[2] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION};
  const SLboolean req[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};

  if ((*engineEngine)->CreateAudioRecorder(engineEngine, &recorderObject, &audioSrc,
                                           &audioSnk, 2, id, req) != SL_RESULT_SUCCESS)
  {
    return -1;
  }

  // Best effort: pick the platform recording preset before Realize().  On
  // API >= 14 use the voice-communication preset, which enables the
  // platform's own AEC/AGC tuning for the mic path.
  if ((*recorderObject)->GetInterface(recorderObject, SL_IID_ANDROIDCONFIGURATION, &androidConfig) == SL_RESULT_SUCCESS) {
    SLint32 recordingPreset = SL_ANDROID_RECORDING_PRESET_GENERIC;

    if (androidSdkVersion >= 14) {
      __android_log_print(ANDROID_LOG_WARN, TAG, "Using voice communication Microphone preset...");
      recordingPreset = SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION;
    }

    (*androidConfig)->SetConfiguration(androidConfig, SL_ANDROID_KEY_RECORDING_PRESET,
                                       &recordingPreset, sizeof(SLint32));
  }

  if ((*recorderObject)->Realize(recorderObject, SL_BOOLEAN_FALSE) != SL_RESULT_SUCCESS) {
    return -1;
  }

  if ((*recorderObject)->GetInterface(recorderObject, SL_IID_RECORD, &recorderRecord) != SL_RESULT_SUCCESS) {
    return -1;
  }

  if ((*recorderObject)->GetInterface(recorderObject, SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &recorderBufferQueue) != SL_RESULT_SUCCESS) {
    return -1;
  }

  if ((*recorderBufferQueue)->RegisterCallback(recorderBufferQueue, &MicrophoneReader::recorderCallback, this) != SL_RESULT_SUCCESS) {
    return -1;
  }

  // Prime one 40 ms buffer; its completion triggers the first callback,
  // which keeps re-enqueueing the buffer from then on.
  if ((*recorderBufferQueue)->Enqueue(recorderBufferQueue, inputBuffer, FRAME_SIZE * 2 * sizeof(short)) != SL_RESULT_SUCCESS) {
    return -1;
  }

  if ((*recorderRecord)->SetRecordState(recorderRecord, SL_RECORDSTATE_RECORDING) != SL_RESULT_SUCCESS) {
    return -1;
  }

  return 0;
}
// Stops capture and destroys the OpenSL recorder created by start().
// Safe after a partial start() failure or repeated calls: every handle is
// NULL-checked, and the interfaces are cleared together with the object
// that owns them.
void MicrophoneReader::stop() {
  if (recorderRecord != NULL) {
    (*recorderRecord)->SetRecordState(recorderRecord, SL_RECORDSTATE_STOPPED);
  }

  if (recorderBufferQueue != NULL) {
    (*recorderBufferQueue)->Clear(recorderBufferQueue);
  }

  if (recorderObject != NULL) {
    (*recorderObject)->Destroy(recorderObject);
    recorderRecord      = NULL;
    recorderObject      = NULL;
    recorderBufferQueue = NULL;
  }
}

View File

@@ -0,0 +1,50 @@
#ifndef __MICROPHONE_READER_H__
#define __MICROPHONE_READER_H__
#include "AudioCodec.h"
#include "RtpAudioSender.h"
#include "Clock.h"
#include <jni.h>
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <SLES/OpenSLES_AndroidConfiguration.h>
#define SAMPLE_RATE 8000
#define FRAME_RATE 50
#define FRAME_SIZE SAMPLE_RATE / FRAME_RATE
// Captures microphone audio via OpenSL ES in 40 ms (two-frame) buffers,
// encodes it with the shared AudioCodec, and ships it through the RTP
// sender.  Lifecycle: construct, start() with a realized engine, stop()
// before destruction.
class MicrophoneReader {

private:
  int androidSdkVersion;      // selects the recording preset in start()
  volatile int muteEnabled;   // set cross-thread via setMute(); read in the capture callback

  AudioCodec &audioCodec;         // shared encoder (not owned)
  RtpAudioSender &rtpAudioSender; // outbound RTP path (not owned)
  Clock &clock;                   // timestamp source (not owned)

  short inputBuffer[FRAME_SIZE * 2]; // one 40 ms capture buffer, reused forever
  char  encodedAudio[1024];          // staging area for the encoded payload

  // OpenSL capture chain; created in start(), destroyed in stop().
  SLObjectItf recorderObject;
  SLRecordItf recorderRecord;
  SLAndroidConfigurationItf androidConfig;
  SLAndroidSimpleBufferQueueItf recorderBufferQueue;

public:
  MicrophoneReader(int androidSdkVersion, AudioCodec &audioCodec, RtpAudioSender &rtpAudioSender, Clock &clock);
  ~MicrophoneReader();

  int start(SLEngineItf *engineEngine);
  void stop();
  void setMute(int muteEnabled);

  void recorderCallback(SLAndroidSimpleBufferQueueItf bufferQueue);
  // Static trampoline registered with OpenSL; forwards to the member version.
  static void recorderCallback(SLAndroidSimpleBufferQueueItf bufferQueue, void* context);
};
#endif

View File

@@ -0,0 +1,37 @@
#include <string.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netdb.h>
#include <jni.h>
#include <android/log.h>
#include "NetworkUtil.h"
#define TAG "NetworkUtil"
// Classifies a numeric host string: returns 1 for IPv4, 0 for IPv6, and
// -1 if the string is not a valid numeric address.
// Fix: the original called freeaddrinfo(res) even when getaddrinfo()
// failed, where res is NULL/indeterminate -- freeaddrinfo(NULL) is not
// guaranteed safe (it crashes on older Android bionic).  We now return
// early on failure and only free a successfully allocated result.
int NetworkUtil::getAddressType(const char* serverIp) {
  struct addrinfo hint, *res = NULL;
  int result;

  memset(&hint, 0, sizeof(hint));

  hint.ai_family = PF_UNSPEC;
  hint.ai_flags  = AI_NUMERICHOST;  // no DNS lookup; parse only

  if (getaddrinfo(serverIp, NULL, &hint, &res) != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "getaddrinfo failed! %s", serverIp);
    return -1;  // nothing was allocated; do not call freeaddrinfo()
  }

  if (res->ai_family == AF_INET) {
    result = 1;
  } else if (res->ai_family == AF_INET6) {
    result = 0;
  } else {
    __android_log_print(ANDROID_LOG_WARN, TAG, "getaddrinfo returned unknown type for %s", serverIp);
    result = -1;
  }

  freeaddrinfo(res);
  return result;
}

View File

@@ -0,0 +1,9 @@
#ifndef __NETWORK_UTIL_H__
#define __NETWORK_UTIL_H__
// Stateless helper for classifying numeric IP address strings.
class NetworkUtil {
public:
  // Returns 1 for IPv4, 0 for IPv6, -1 for anything else.
  static int getAddressType(const char* serverIp);
};
#endif

View File

@@ -0,0 +1,38 @@
#include "RtpAudioReceiver.h"
#include <android/log.h>
#define TAG "RtpAudioReceiver"
// Binds to an already-connected socket (not owned) and sets up the SRTP
// context from the supplied parameters; init() must be called before
// receive().
RtpAudioReceiver::RtpAudioReceiver(int socketFd, SrtpStreamParameters *parameters) :
  socketFd(socketFd), sequenceCounter(), srtpStream(parameters)
{
}
// Prepares the SRTP decryption context.  Returns 0 on success, -1 on
// failure; receive() must not be called after a failure.
int RtpAudioReceiver::init() {
  if (srtpStream.init() == 0) {
    return 0;
  }

  __android_log_print(ANDROID_LOG_WARN, TAG, "SRTP stream failed to initialize!");
  return -1;
}
// Blocks on recv(), parses the datagram as SRTP, authenticates/decrypts it,
// and returns a heap-allocated RtpPacket the caller must delete.  Returns
// NULL on socket error, on a datagram too short to contain an RTP header,
// or on SRTP verification failure.
// Fix: the original accepted any non-negative recv() length; a truncated
// datagram (shorter than the RTP header) would give RtpPacket a negative
// payload length.
RtpPacket* RtpAudioReceiver::receive(char* encodedData, int encodedDataLen) {
  int received = recv(socketFd, encodedData, encodedDataLen, 0);

  if (received == -1) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "recv() failed!");
    return NULL;
  }

  if (received < (int)sizeof(RtpHeader)) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Datagram too short for RTP: %d", received);
    return NULL;
  }

  RtpPacket *packet = new RtpPacket(encodedData, received);

  // convertNext() extends the 16-bit wire sequence number for SRTP replay/IV purposes.
  if (srtpStream.decrypt(*packet, sequenceCounter.convertNext(packet->getSequenceNumber())) != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "SRTP decrypt failed!");
    delete packet;
    return NULL;
  }

  return packet;
}

View File

@@ -0,0 +1,32 @@
#ifndef __RTP_AUDIO_RECEIVER_H__
#define __RTP_AUDIO_RECEIVER_H__
#include "RtpPacket.h"
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <arpa/inet.h>
#include <sys/socket.h>
#include "SequenceCounter.h"
#include "SrtpStream.h"
// Receives SRTP datagrams from a connected socket, verifies/decrypts them,
// and hands out parsed RtpPacket objects (caller-owned).
class RtpAudioReceiver {

private:
  int socketFd;                     // connected UDP socket (not owned)
  SequenceCounter sequenceCounter;  // extends 16-bit sequence numbers across wraps
  SrtpStream srtpStream;            // inbound SRTP context

public:
  RtpAudioReceiver(int socketFd, SrtpStreamParameters *parameters);
  int init();
  // Returns a new RtpPacket (caller deletes) or NULL on error.
  RtpPacket* receive(char* encodedData, int encodedDataLen);
};
#endif

View File

@@ -0,0 +1,43 @@
#include "RtpAudioSender.h"
#include "RtpPacket.h"
#include <android/log.h>
#include <errno.h>
#define TAG "RtpAudioSender"
// Binds to the shared socket and destination address (neither owned -- the
// sockaddr is freed by CallAudioManager) and sets up the outbound SRTP
// context; init() must be called before send().
RtpAudioSender::RtpAudioSender(int socketFd, struct sockaddr *sockAddr, int sockAddrLen,
                               SrtpStreamParameters *parameters) :
  socketFd(socketFd), sequenceNumber(0), sockAddr(sockAddr), sockAddrLen(sockAddrLen),
  srtpStream(parameters)
{
}
// Prepares the SRTP encryption context.  Returns 0 on success, -1 on
// failure; send() must not be called after a failure.
int RtpAudioSender::init() {
  int status = srtpStream.init();

  if (status != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "SRTP stream failed!");
    return -1;
  }

  return 0;
}
// Wraps one encoded payload in an RTP packet, SRTP-encrypts it, and sends
// it to the configured destination.  Returns 0 on success, -1 on failure.
// sequenceNumber is a 32-bit counter: the full value feeds the SRTP
// encrypt (packet index) while RtpPacket's constructor truncates it to the
// 16-bit wire field via htons -- standard RTP behavior.
int RtpAudioSender::send(int timestamp, char* encodedData, int encodedDataLen) {
  RtpPacket packet(encodedData, encodedDataLen, sequenceNumber, timestamp);

  if (srtpStream.encrypt(packet, sequenceNumber++) != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "SRTP encrypt() failed!");
    return -1;
  }

  char* serializedPacket    = packet.getSerializedPacket();
  int   serializedPacketLen = packet.getSerializedPacketLen();

  if (sendto(socketFd, serializedPacket, serializedPacketLen, 0, sockAddr, sockAddrLen) == -1)
  {
    __android_log_print(ANDROID_LOG_WARN, TAG, "sendto() failed!");
    return -1;
  }

  return 0;
}

View File

@@ -0,0 +1,35 @@
#ifndef __RTP_AUDIO_SENDER_H__
#define __RTP_AUDIO_SENDER_H__
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <arpa/inet.h>
#include <sys/socket.h>
//#include "srtp.h"
#include "SrtpStream.h"
// Sends SRTP-protected RTP audio frames over a UDP socket.
class RtpAudioSender {
private:
  int socketFd;              // datagram socket, owned by the caller
  uint32_t sequenceNumber;   // next RTP sequence number / SRTP packet index
                             // NOTE(review): only the low 16 bits fit the RTP
                             // header field — confirm wrap behavior vs receiver.
  struct sockaddr *sockAddr; // remote endpoint, owned by the caller
  int sockAddrLen;
  SrtpStream srtpStream;     // encrypts + authenticates outgoing packets
public:
  // `parameters` is forwarded to the SrtpStream member.
  RtpAudioSender(int socketFd, struct sockaddr *sockAddr, int sockAddrLen,
                 SrtpStreamParameters *parameters);
// ~RtpAudioSender();
  // Expands keys; must return 0 before send() is used.
  int init();
  // Returns 0 on success, -1 on failure.
  int send(int timestamp, char *encodedData, int encodedDataLen);
};
#endif

View File

@@ -0,0 +1,78 @@
#include "RtpPacket.h"
#include <stdlib.h>
#include <string.h>
#include <arpa/inet.h>
#include "SrtpStream.h"
//#include <srtp.h>
// Wraps a packet received off the wire. The buffer is copied, so the caller
// keeps ownership of packetBuf. payloadLen covers everything past the fixed
// RTP header (including the SRTP MAC until decrypt() trims it).
RtpPacket::RtpPacket(char* packetBuf, int packetLen) {
  packet = (char*)malloc(packetLen);
  payloadLen = packetLen - (int)sizeof(RtpHeader);

  // A datagram shorter than the RTP header would otherwise leave payloadLen
  // negative, which callers read back as a huge value through the unsigned
  // getPayloadLen() and then index out of bounds. Clamp so decrypt()'s
  // short-packet check rejects it cleanly.
  if (payloadLen < 0) {
    payloadLen = 0;
  }

  memcpy(packet, packetBuf, packetLen);
}
// Builds an outgoing packet around `payload`. SRTP_MAC_SIZE extra bytes are
// allocated past the payload so SrtpStream::encrypt() can append its HMAC
// tag in place without reallocating.
RtpPacket::RtpPacket(char* payload, int payloadBufLen, int sequenceNumber, int timestamp) {
  packet = (char*)malloc(sizeof(RtpHeader) + payloadBufLen + SRTP_MAC_SIZE);
  payloadLen = payloadBufLen;

  memset(packet, 0, sizeof(RtpHeader) + payloadLen + SRTP_MAC_SIZE);

  RtpHeader *header = (RtpHeader*)packet;
  header->flags = htons(32768);            // 0x8000: RTP version 2, all other flag bits and payload type 0
  header->sequenceNumber = htons(sequenceNumber);
  header->ssrc = 0;                        // SSRC is always zero for packets built here
  header->timestamp = htonl(timestamp);

  memcpy(packet + sizeof(RtpHeader), payload, payloadLen);
}
RtpPacket::~RtpPacket() {
  free(packet);  // packet always comes from malloc() in one of the constructors
}

// Sequence number, converted from network byte order.
uint16_t RtpPacket::getSequenceNumber() {
  RtpHeader *header = (RtpHeader*)packet;
  return ntohs(header->sequenceNumber);
}
int RtpPacket::getPayloadType() {
RtpHeader *header = (RtpHeader*)packet;
return header->flags & 0x7F;
}
// RTP timestamp, converted from network byte order.
uint32_t RtpPacket::getTimestamp() {
  RtpHeader *header = (RtpHeader*)packet;
  return ntohl(header->timestamp);
}

void RtpPacket::setTimestamp(uint32_t timestamp) {
  RtpHeader *header = (RtpHeader*)packet;
  header->timestamp = htonl(timestamp);
}

// Synchronization source id (always 0 for packets built by the outgoing ctor).
uint32_t RtpPacket::getSsrc() {
  RtpHeader *header = (RtpHeader*)packet;
  return ntohl(header->ssrc);
}

// Pointer to the payload bytes, immediately after the fixed header.
char* RtpPacket::getPayload() {
  return packet + sizeof(RtpHeader);
}

uint32_t RtpPacket::getPayloadLen() {
  return payloadLen;
}

// Used by SrtpStream to grow the payload over the appended MAC on encrypt
// and to shrink it after verifying/stripping the MAC on decrypt.
void RtpPacket::setPayloadLen(uint32_t payloadLen) {
  this->payloadLen = payloadLen;
}

// Full wire image: header + payload (+ MAC once encrypt() has run).
char* RtpPacket::getSerializedPacket() {
  return packet;
}

int RtpPacket::getSerializedPacketLen() {
  return sizeof(RtpHeader) + payloadLen;
}

38
jni/redphone/RtpPacket.h Normal file
View File

@@ -0,0 +1,38 @@
#ifndef __RTP_PACKET_H__
#define __RTP_PACKET_H__
#include <sys/types.h>
// Fixed 12-byte RTP header (RFC 3550). All fields are kept in network byte
// order in memory; RtpPacket's accessors convert on read/write.
typedef struct _RtpHeader {
  uint16_t flags;           // V/P/X/CC octet + M/payload-type octet
  uint16_t sequenceNumber;
  uint32_t timestamp;
  uint32_t ssrc;
} RtpHeader;
// Owns one RTP packet buffer: header + payload, with SRTP_MAC_SIZE bytes of
// headroom when built via the outgoing-packet constructor.
class RtpPacket {
private:
  char *packet;    // malloc'd buffer, freed in the destructor
  int payloadLen;  // bytes after the fixed header currently considered payload
public:
  // Wraps a received datagram (buffer is copied; caller keeps `packet`).
  RtpPacket(char *packet, int packetLen);
  // Builds an outgoing packet around `payload`, leaving MAC headroom.
  RtpPacket(char *payload, int payloadLen, int sequenceNumber, int timestamp);
  ~RtpPacket();
  uint16_t getSequenceNumber();
  int getPayloadType();
  uint32_t getTimestamp();
  void setTimestamp(uint32_t timestamp);
  uint32_t getSsrc();
  char* getPayload();
  uint32_t getPayloadLen();
  // Adjusted by SrtpStream when the MAC is appended or stripped.
  void setPayloadLen(uint32_t len);
  char* getSerializedPacket();
  int getSerializedPacketLen();
};
#endif

View File

@@ -0,0 +1,33 @@
#ifndef __SAMPLE_RATE_UTIL_H__
#define __SAMPLE_RATE_UTIL_H__
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
// Maps a sample rate in Hz to the matching OpenSL ES SL_SAMPLINGRATE_*
// constant. Unsupported rates yield (SLuint32)-1 (all bits set, since
// SLuint32 is unsigned).
class SampleRateUtil {
public:
  static SLuint32 convertSampleRate(SLuint32 hertz) {
    switch (hertz) {
      case 8000:   return SL_SAMPLINGRATE_8;
      case 11025:  return SL_SAMPLINGRATE_11_025;
      case 12000:  return SL_SAMPLINGRATE_12;
      case 16000:  return SL_SAMPLINGRATE_16;
      case 22050:  return SL_SAMPLINGRATE_22_05;
      case 24000:  return SL_SAMPLINGRATE_24;
      case 32000:  return SL_SAMPLINGRATE_32;
      case 44100:  return SL_SAMPLINGRATE_44_1;
      case 48000:  return SL_SAMPLINGRATE_48;
      case 64000:  return SL_SAMPLINGRATE_64;
      case 88200:  return SL_SAMPLINGRATE_88_2;
      case 96000:  return SL_SAMPLINGRATE_96;
      case 192000: return SL_SAMPLINGRATE_192;
      default:     return -1;
    }
  }
};
#endif

View File

@@ -0,0 +1,38 @@
#ifndef __SEQUENCE_COUNTER_H__
#define __SEQUENCE_COUNTER_H__
#include <stdint.h>
// <stdint.h> already defines INT16_MAX/INT16_MIN in C++11 and later;
// redefining them unconditionally risks macro-redefinition clashes, so only
// provide fallbacks when the toolchain's headers did not.
#ifndef INT16_MAX
#define INT16_MAX 0x7fff
#endif
#ifndef INT16_MIN
#define INT16_MIN (-INT16_MAX - 1)
#endif

// Span of a 16-bit sequence number (2^16).
const int64_t ShortRange = ((int64_t)1) << 16;

// Extends 16-bit RTP sequence numbers into a consistent 64-bit index that
// survives wrap-around: each new id is interpreted as the smallest signed
// step from the previously seen id.
class SequenceCounter {
private:
  uint16_t prevShortId;  // last 16-bit id observed
  int64_t prevLongId;    // its extended 64-bit counterpart
// int64_t currentLongId;
public:
  SequenceCounter() : prevShortId(0), prevLongId(0) {}

  // Returns the 64-bit index for nextShortId. Raw deltas outside
  // [INT16_MIN, INT16_MAX] are folded back by one full 2^16 range, so a jump
  // from 0xFFFF to 0x0000 advances by +1 rather than rewinding by 65535.
  int64_t convertNext(uint16_t nextShortId) {
    int64_t delta = (int64_t)nextShortId - (int64_t)prevShortId;

    if (delta > INT16_MAX) delta -= ShortRange;
    if (delta < INT16_MIN) delta += ShortRange;

    int64_t nextLongId = prevLongId + delta;
    prevShortId = nextShortId;
    prevLongId  = nextLongId;
    return nextLongId;
  }
};
#endif

View File

@@ -0,0 +1,97 @@
#include "SrtpStream.h"
#include <android/log.h>
#include <unistd.h>
#define AES_BLOCK_SIZE 16
#define TAG "SrtpStream"
// Takes ownership of `parameters`; it is released in the destructor.
SrtpStream::SrtpStream(SrtpStreamParameters *parameters) :
  parameters(parameters)
{}

SrtpStream::~SrtpStream() {
  // delete of a null pointer is a no-op, so no explicit check is required.
  delete parameters;
}
// Expands the cipher key into an AES key schedule. The encryption-direction
// schedule serves both encrypt() and decrypt(), because CTR mode only ever
// encrypts the counter block. Returns 0 on success, -1 on failure.
int SrtpStream::init() {
  if (AES_set_encrypt_key(parameters->cipherKey, SRTP_AES_KEY_SIZE * 8, &key) != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Failed to set AES key!");
    return -1;
  }
  return 0;
}
// Builds the 16-byte AES-CTR IV: the 14-byte session salt XORed with the
// low 16 bits of the SSRC (offsets 6-7) and the 48-bit logical packet index
// (offsets 8-13, big-endian). The final two bytes are left zero.
void SrtpStream::setIv(int64_t logicalSequence, uint32_t ssrc, uint8_t *salt, uint8_t *iv) {
  memset(iv, 0, AES_BLOCK_SIZE);
  memcpy(iv, salt, SRTP_SALT_SIZE);

  iv[6] ^= (uint8_t)(ssrc >> 8);
  iv[7] ^= (uint8_t)(ssrc);

  // Fold the logical sequence in, most significant byte first.
  for (int i = 0; i < 6; ++i) {
    iv[8 + i] ^= (uint8_t)(logicalSequence >> (8 * (5 - i)));
  }
}
// Authenticates and decrypts `packet` in place. The trailing SRTP_MAC_SIZE
// bytes must hold an HMAC-SHA1 over everything before them; on success the
// payload length is shrunk past the MAC and the payload is decrypted with
// AES-CTR. Returns 0 on success, -1 on a short or forged packet.
int SrtpStream::decrypt(RtpPacket &packet, int64_t logicalSequence) {
  uint8_t iv[AES_BLOCK_SIZE];
  uint8_t ecount[AES_BLOCK_SIZE];
  uint8_t ourMac[SRTP_MAC_SIZE];
  uint32_t num = 0;
  uint32_t digest = 0;

  setIv(logicalSequence, packet.getSsrc(), parameters->salt, iv);
  memset(ecount, 0, sizeof(ecount));

  if (packet.getPayloadLen() < (SRTP_MAC_SIZE + 1)) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "Packet shorter than MAC!");
    return -1;
  }

  HMAC(EVP_sha1(), parameters->macKey, SRTP_MAC_KEY_SIZE,
       (uint8_t*)packet.getSerializedPacket(), packet.getSerializedPacketLen() - SRTP_MAC_SIZE, ourMac, &digest);

  // Compare MACs in constant time: an early-exit memcmp() reveals through
  // timing how many leading tag bytes matched, letting an on-path attacker
  // forge a tag byte-by-byte. Accumulate the XOR of every byte instead.
  const uint8_t *theirMac = (const uint8_t*)packet.getSerializedPacket() +
                            packet.getSerializedPacketLen() - SRTP_MAC_SIZE;
  uint8_t macDiff = 0;
  for (int i = 0; i < SRTP_MAC_SIZE; i++) {
    macDiff |= ourMac[i] ^ theirMac[i];
  }

  if (macDiff != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "MAC comparison failed!");
    return -1;
  }

  packet.setPayloadLen(packet.getPayloadLen() - SRTP_MAC_SIZE);

  AES_ctr128_encrypt((uint8_t*)packet.getPayload(), (uint8_t*)packet.getPayload(),
                     packet.getPayloadLen(), &key, iv, ecount, &num);

  return 0;
}
// Encrypts `packet`'s payload in place with AES-CTR, then appends an
// HMAC-SHA1 tag over header + ciphertext directly after the payload (the
// outgoing-packet constructor reserved SRTP_MAC_SIZE bytes of headroom for
// this) and grows payloadLen to cover it. Always returns 0.
int SrtpStream::encrypt(RtpPacket &packet, int64_t logicalSequence) {
  uint8_t iv[AES_BLOCK_SIZE];
  uint8_t ecount[AES_BLOCK_SIZE];
  uint32_t num = 0;
  uint32_t digest = 0;

  setIv(logicalSequence, packet.getSsrc(), parameters->salt, iv);
  memset(ecount, 0, sizeof(ecount));

  AES_ctr128_encrypt((uint8_t*)packet.getPayload(), (uint8_t*)packet.getPayload(), packet.getPayloadLen(), &key, iv, ecount, &num);

  // Encrypt-then-MAC: the tag is written into the reserved tail of the buffer.
  HMAC(EVP_sha1(), parameters->macKey, SRTP_MAC_KEY_SIZE,
       (uint8_t*)packet.getSerializedPacket(), packet.getSerializedPacketLen(),
       (uint8_t*)packet.getSerializedPacket() + packet.getSerializedPacketLen(), &digest);

  packet.setPayloadLen(packet.getPayloadLen() + SRTP_MAC_SIZE);
  return 0;
}

50
jni/redphone/SrtpStream.h Normal file
View File

@@ -0,0 +1,50 @@
#ifndef __STRP_STREAM_H__
#define __STRP_STREAM_H__
#include <openssl/aes.h>
#include <openssl/hmac.h>
#define SRTP_AES_KEY_SIZE 16
#define SRTP_SALT_SIZE 14
#define SRTP_MAC_KEY_SIZE 20
#define SRTP_MAC_SIZE 20
#include "RtpPacket.h"
// Key material for one SRTP direction: AES cipher key, HMAC key, and IV
// salt, copied out of the caller's buffers at construction time.
// Instances handed to SrtpStream are deleted by its destructor.
class SrtpStreamParameters {
public:
  uint8_t cipherKey[SRTP_AES_KEY_SIZE];
  uint8_t macKey[SRTP_MAC_KEY_SIZE];
  uint8_t salt[SRTP_SALT_SIZE];

  // Copies SRTP_AES_KEY_SIZE / SRTP_MAC_KEY_SIZE / SRTP_SALT_SIZE bytes from
  // the given pointers; the caller keeps ownership of the source buffers.
  SrtpStreamParameters(uint8_t *cipherKeyPtr, uint8_t* macKeyPtr, uint8_t *saltPtr)
  {
    memcpy(cipherKey, cipherKeyPtr, SRTP_AES_KEY_SIZE);
    memcpy(macKey, macKeyPtr, SRTP_MAC_KEY_SIZE);
    memcpy(salt, saltPtr, SRTP_SALT_SIZE);
  }
};
// AES-CTR encryption + HMAC-SHA1 authentication for RTP packets
// (one direction per instance).
class SrtpStream {
private:
  SrtpStreamParameters *parameters;  // owned: deleted in the destructor
  AES_KEY key;                       // key schedule expanded in init()
  // XORs the salt with the SSRC and 48-bit logical sequence to form the CTR IV.
  void setIv(int64_t logicalSequence, uint32_t ssrc, uint8_t *salt, uint8_t *iv);
public:
  // Takes ownership of `parameters`.
  SrtpStream(SrtpStreamParameters *parameters);
  ~SrtpStream();
  // Expands the AES key schedule; must return 0 before encrypt()/decrypt().
  int init();
  // Both modify `packet` in place and return 0 on success, -1 on failure.
  int decrypt(RtpPacket &packet, int64_t logicalSequence);
  int encrypt(RtpPacket &packet, int64_t logicalSequence);
};
#endif

View File

@@ -0,0 +1,51 @@
#ifndef __WEB_RTC_CODEC_H__
#define __WEB_RTC_CODEC_H__
#include "AudioCodec.h"
#include <sys/types.h>
#include <modules/audio_coding/neteq/interface/audio_decoder.h>
// Adapts the Speex-backed AudioCodec to the webrtc::AudioDecoder interface
// so NetEq can call back into it for decoding and packet-loss concealment.
class WebRtcCodec : public webrtc::AudioDecoder {
private:
  AudioCodec &codec;  // not owned; must outlive this adapter
public:
  WebRtcCodec(AudioCodec &codec) :
    AudioDecoder(webrtc::kDecoderArbitrary), codec(codec)
  {}

  // Decodes the encoded buffer into `decoded`, always reporting normal
  // speech. Returns AudioCodec::decode()'s result — presumably the number
  // of decoded samples (TODO confirm against AudioCodec.cpp).
  int Decode(const uint8_t* encoded, size_t encoded_len,
             int16_t* decoded, SpeechType* speech_type)
  {
    *speech_type = kSpeech;
    return codec.decode((char*)encoded, encoded_len, decoded);
  }

  // The codec supplies its own packet-loss concealment via conceal().
  bool HasDecodePlc() const {
    return 1;
  }

  int DecodePlc(int num_frames, int16_t* decoded) {
    return codec.conceal(num_frames, decoded);
  }

  int Init() { return 0; }

  // Duration in samples: whole encoded frames times samples per frame.
  int PacketDuration(const uint8_t* encoded, size_t encoded_len) const {
    return (encoded_len / SPEEX_ENCODED_FRAME_SIZE) * SPEEX_FRAME_SIZE;
  }

  // Redundant packets are treated identically to primary ones.
  int PacketDurationRedundant(const uint8_t* encoded, size_t encoded_len) const {
    return this->PacketDuration(encoded, encoded_len);
  }

  // This stream never carries forward-error-correction data.
  bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const {
    return 0;
  }
};
#endif

View File

@@ -0,0 +1,109 @@
#include "WebRtcJitterBuffer.h"

#include <pthread.h>
#define TAG "WebRtcJitterBuffer"
// Flag polled by the statistics thread; cleared by stop() to end its loop.
// NOTE(review): file-scope and volatile rather than a per-instance
// std::atomic, so it is shared across all instances and is not a real
// synchronization primitive — confirm single-instance usage.
static volatile int running = 0;

WebRtcJitterBuffer::WebRtcJitterBuffer(AudioCodec &codec) :
  neteq(NULL), webRtcCodec(codec)
{
  running = 1;
}
int WebRtcJitterBuffer::init() {
webrtc::NetEq::Config config;
config.sample_rate_hz = 8000;
neteq = webrtc::NetEq::Create(config);
if (neteq == NULL) {
__android_log_print(ANDROID_LOG_WARN, TAG, "Failed to construct NetEq!");
return -1;
}
if (neteq->RegisterExternalDecoder(&webRtcCodec, webrtc::kDecoderPCMu, 0) != 0) {
__android_log_print(ANDROID_LOG_WARN, TAG, "Failed to register external codec!");
return -1;
}
pthread_t thread;
pthread_create(&thread, NULL, &WebRtcJitterBuffer::collectStats, this);
return 0;
}
// NOTE(review): clearing `running` here asks the stats thread to exit in case
// stop() was never called, but nothing waits for it — an iteration already in
// flight can still touch `neteq` after deletion. Callers should stop() and
// give the thread time to wind down before destroying this object.
WebRtcJitterBuffer::~WebRtcJitterBuffer() {
  running = 0;

  if (neteq != NULL) {
    delete neteq;
  }
}
// Feeds one received RTP packet into NetEq. `tick` is presumably the receive
// time in NetEq's timestamp units — TODO confirm against the caller.
void WebRtcJitterBuffer::addAudio(RtpPacket *packet, uint32_t tick) {
  webrtc::WebRtcRTPHeader header;
  header.header.payloadType = packet->getPayloadType();
  header.header.sequenceNumber = packet->getSequenceNumber();
  header.header.timestamp = packet->getTimestamp();
  header.header.ssrc = packet->getSsrc();

  // NOTE(review): the malloc result is unchecked and never freed here — if
  // InsertPacket() copies the payload rather than taking ownership, this
  // leaks one allocation per packet. Verify against the NetEq API contract.
  uint8_t *payload = (uint8_t*)malloc(packet->getPayloadLen());
  memcpy(payload, packet->getPayload(), packet->getPayloadLen());

  if (neteq->InsertPacket(header, payload, packet->getPayloadLen(), tick) != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "neteq->InsertPacket() failed!");
  }
}
// Pulls up to maxRawData decoded samples out of NetEq into rawData.
// Returns the per-channel sample count NetEq reported; errors are logged
// and whatever count was produced is still returned.
int WebRtcJitterBuffer::getAudio(short *rawData, int maxRawData) {
  int sampleCount  = 0;
  int channelCount = 0;

  const int status = neteq->GetAudio(maxRawData, rawData, &sampleCount, &channelCount, NULL);
  if (status != 0) {
    __android_log_print(ANDROID_LOG_WARN, TAG, "neteq->GetAudio() failed!");
  }

  return sampleCount;
}
// Signals the statistics thread to exit; because the loop sleeps 30 seconds
// between iterations, the thread may take up to that long to actually stop.
void WebRtcJitterBuffer::stop() {
  running = 0;
}
// Background loop: logs NetEq jitter-buffer statistics every 30 seconds
// until `running` is cleared. Dereferences `neteq`, so this object must
// stay alive for as long as the loop runs.
void WebRtcJitterBuffer::collectStats() {
  while (running) {
    webrtc::NetEqNetworkStatistics stats;
    neteq->NetworkStatistics(&stats);

    __android_log_print(ANDROID_LOG_WARN, "WebRtcJitterBuffer",
                        "Jitter Stats:\n{\n" \
                        "  current_buffer_size_ms: %d,\n" \
                        "  preferred_buffer_size_ms: %d\n" \
                        "  jitter_peaks_found: %d\n" \
                        "  packet_loss_rate: %d\n" \
                        "  packet_discard_rate: %d\n" \
                        "  expand_rate: %d\n" \
                        "  preemptive_rate: %d\n" \
                        "  accelerate_rate: %d\n" \
                        "  clockdrift_ppm: %d\n" \
                        "  added_zero_samples: %d\n" \
                        "}",
                        stats.current_buffer_size_ms,
                        stats.preferred_buffer_size_ms,
                        stats.jitter_peaks_found,
                        stats.packet_loss_rate,
                        stats.packet_discard_rate,
                        stats.expand_rate,
                        stats.preemptive_rate,
                        stats.accelerate_rate,
                        stats.clockdrift_ppm,
                        stats.added_zero_samples);
    sleep(30);
  }
}
void* WebRtcJitterBuffer::collectStats(void *context) {
WebRtcJitterBuffer* jitterBuffer = static_cast<WebRtcJitterBuffer*>(context);
jitterBuffer->collectStats();
return 0;
}

View File

@@ -0,0 +1,35 @@
#ifndef __WEBRTC_JITTER_BUFFER_H__
#define __WEBRTC_JITTER_BUFFER_H__
#include "AudioCodec.h"
#include "WebRtcCodec.h"
#include "RtpPacket.h"
#include <android/log.h>
#include <pthread.h>
#include <unistd.h>
#include <modules/audio_coding/neteq/interface/neteq.h>
#include <modules/interface/module_common_types.h>
// Wraps WebRTC's NetEq jitter buffer around the Speex AudioCodec: received
// RTP packets go in via addAudio(), decoded/concealed PCM comes out via
// getAudio(). A background thread periodically logs buffer statistics.
class WebRtcJitterBuffer {
private:
  webrtc::NetEq *neteq;     // owned; created in init(), deleted in the destructor
  WebRtcCodec webRtcCodec;  // adapter that lets NetEq call back into the codec
public:
  WebRtcJitterBuffer(AudioCodec &codec);
  ~WebRtcJitterBuffer();
  // Creates NetEq and starts the stats thread; 0 on success, -1 on failure.
  int init();
  void addAudio(RtpPacket *packet, uint32_t tick);
  // Returns the number of samples per channel written into rawData.
  int getAudio(short *rawData, int maxRawData);
  // Asks the background stats thread to exit.
  void stop();
  void collectStats();                       // stats loop body
  static void* collectStats(void *context);  // pthread trampoline
};
#endif