Merge remote-tracking branch 'origin/stable'

This commit is contained in:
Dmytro Bogovych 2022-06-03 08:46:24 +03:00
commit 93d53957a2
1332 changed files with 134626 additions and 292 deletions

View File

@ -88,11 +88,17 @@ if (CMAKE_SYSTEM MATCHES "Darwin*")
add_definitions (-DTARGET_OSX) add_definitions (-DTARGET_OSX)
endif() endif()
if (CMAKE_SYSTEM MATCHES "Android")
message("Adding the Oboe library")
set (OBOE_DIR libs/oboe)
add_subdirectory (${OBOE_DIR} ./oboe)
include_directories (${OBOE_DIR}/include)
endif()
if (USE_MUSL) if (USE_MUSL)
add_definitions(-DTARGET_MUSL) add_definitions(-DTARGET_MUSL)
endif() endif()
if (USE_AQUA_LIB) if (USE_AQUA_LIB)
message("Use AQuA library") message("Use AQuA library")
add_definitions( -DUSE_AQUA_LIBRARY ) add_definitions( -DUSE_AQUA_LIBRARY )
@ -102,7 +108,8 @@ endif()
if (USE_PVQA_LIBRARY) if (USE_PVQA_LIBRARY)
message("Use PVQA libraries") message("Use PVQA libraries")
add_definitions( -DUSE_PVQA_LIBRARY ) add_definitions( -DUSE_PVQA_LIBRARY )
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libs/pvqa/include) include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libs/pvqa/include
${CMAKE_CURRENT_SOURCE_DIR}/libs/pvqa++/include)
endif() endif()
set (RTPHONE_SOURCES set (RTPHONE_SOURCES
@ -201,6 +208,10 @@ else ()
set (LIBS ${LIBS} dl uuid) set (LIBS ${LIBS} dl uuid)
endif () endif ()
if (CMAKE_SYSTEM MATCHES "Android")
set (LIBS ${LIBS} oboe)
endif()
if (USE_AMR_CODEC) if (USE_AMR_CODEC)
set (LIBS ${LIBS}) set (LIBS ${LIBS})
endif (USE_AMR_CODEC) endif (USE_AMR_CODEC)
@ -215,7 +226,7 @@ target_link_libraries(rtphone
uuid uuid
${OPENSSL_SSL} ${OPENSSL_SSL}
${OPENSSL_CRYPTO} ${OPENSSL_CRYPTO}
${LIBS}) ${LIBS} )
target_include_directories(rtphone target_include_directories(rtphone

View File

@ -171,10 +171,13 @@ void AgentImpl::processConfig(JsonCpp::Value &d, JsonCpp::Value &answer)
#endif #endif
std::string transport = d["transport"].asString(); std::string transport = d["transport"].asString();
config()[CONFIG_TRANSPORT] = (transport == "any") ? 0 : (transport == "udp" ? 1 : (transport == "tcp" ? 2 : 3)); config()[CONFIG_TRANSPORT] = (transport == "any") ? TransportType_Any : (transport == "udp" ? TransportType_Udp : (transport == "tcp" ? TransportType_Tcp : TransportType_Tls));
config()[CONFIG_IPV4] = d["ipv4"].asBool(); config()[CONFIG_IPV4] = d["ipv4"].asBool();
config()[CONFIG_IPV6] = d["ipv6"].asBool(); config()[CONFIG_IPV6] = d["ipv6"].asBool();
if (transport == "tls")
config()[CONFIG_SIPS] = true;
// Log file // Log file
std::string logfile = d["logfile"].asString(); std::string logfile = d["logfile"].asString();
ice::Logger& logger = ice::GLogger; ice::Logger& logger = ice::GLogger;
@ -186,10 +189,12 @@ void AgentImpl::processConfig(JsonCpp::Value &d, JsonCpp::Value &answer)
mUseNativeAudio = d["nativeaudio"].asBool(); mUseNativeAudio = d["nativeaudio"].asBool();
config()[CONFIG_OWN_DNS] = d["dns_servers"].asString(); config()[CONFIG_OWN_DNS] = d["dns_servers"].asString();
config()[CONFIG_SIPS] = d["secure"].asBool();
answer["status"] = Status_Ok; answer["status"] = Status_Ok;
} }
void AgentImpl::processStart(JsonCpp::Value& /*request*/, JsonCpp::Value &answer) void AgentImpl::processStart(JsonCpp::Value& request, JsonCpp::Value &answer)
{ {
std::unique_lock<std::recursive_mutex> l(mAgentMutex); std::unique_lock<std::recursive_mutex> l(mAgentMutex);
if (mThread) if (mThread)
@ -198,6 +203,9 @@ void AgentImpl::processStart(JsonCpp::Value& /*request*/, JsonCpp::Value &answer
return; // Started already return; // Started already
} }
// Process config (can be sent via start command as well)
// processConfig(request, answer);
// Start socket thread // Start socket thread
SocketHeap::instance().start(); SocketHeap::instance().start();

View File

@ -25,7 +25,6 @@
namespace Audio namespace Audio
{ {
class AndroidEnumerator: public Enumerator class AndroidEnumerator: public Enumerator
{ {
public: public:

View File

@ -0,0 +1,245 @@
#include "Audio_AndroidOboe.h"
#include "../helper/HL_Sync.h"
#include "../helper/HL_Log.h"
#include <mutex>
#include <iostream>
#include <stdexcept>
#include "../helper/HL_String.h"
#include "../helper/HL_Time.h"
#ifdef TARGET_ANDROID
#define LOG_SUBSYSTEM "Audio"
using namespace Audio;
// -------------------- AndroidEnumerator -----------------------------
// Stub enumerator: device selection is delegated to the Oboe library,
// so a single placeholder device (id 0, name "Audio") is reported.
AndroidEnumerator::AndroidEnumerator()
{}

AndroidEnumerator::~AndroidEnumerator()
{}

// The single placeholder device is always the default one.
int AndroidEnumerator::indexOfDefaultDevice()
{
return 0;
}

// Exactly one (virtual) device is exposed.
int AndroidEnumerator::count()
{
return 1;
}

// Every index maps to device id 0.
int AndroidEnumerator::idAt(int index)
{
return 0;
}

// Fixed display name for the placeholder device.
std::string AndroidEnumerator::nameAt(int index)
{
return "Audio";
}

// Nothing to acquire/release for the stub enumerator.
void AndroidEnumerator::open(int direction)
{}

void AndroidEnumerator::close()
{}
// --------------- Input implementation ----------------
// devId is ignored: Oboe picks the physical input device itself.
AndroidInputDevice::AndroidInputDevice(int devId)
{}

AndroidInputDevice::~AndroidInputDevice()
{
// Make sure the recording stream is stopped and freed.
close();
}
// Opens and starts a low-latency, exclusive, mono 16-bit Oboe input stream.
// Returns true when the stream is recording. On a failed requestStart()
// the partially opened stream is released again and false is returned.
bool AndroidInputDevice::open()
{
if (active())
return true;

// The device chooses the sample rate; it is queried after openStream().
oboe::AudioStreamBuilder builder;
builder.setDirection(oboe::Direction::Input);
builder.setPerformanceMode(oboe::PerformanceMode::LowLatency);
builder.setSharingMode(oboe::SharingMode::Exclusive);
builder.setFormat(oboe::AudioFormat::I16);
builder.setChannelCount(oboe::ChannelCount::Mono);
builder.setCallback(this); // captured frames arrive via onAudioReady()

oboe::Result rescode = builder.openStream(&mRecordingStream);
if (rescode != oboe::Result::OK)
return false;

// Remember the device's native rate; getFormat() reports it upstream.
mDeviceRate = mRecordingStream->getSampleRate();
ICELogInfo(<< "Input Opened with rate " << mDeviceRate);
mActive = true;
rescode = mRecordingStream->requestStart();
if (rescode != oboe::Result::OK)
{
// Stream opened but refused to start - clean up again.
close();
mActive = false;
}
return mActive;
}
// Stops and releases the recording stream (idempotent).
void AndroidInputDevice::close()
{
// There is no check for active() value because close() can be called to cleanup after bad open() call.
if (mRecordingStream != nullptr)
{
mRecordingStream->close();
// NOTE(review): manual delete assumes this Oboe API hands out a
// caller-owned raw stream - confirm against the Oboe version in libs/oboe.
delete mRecordingStream; mRecordingStream = nullptr;
}
mActive = false;
}
// Oboe callback: runs on the audio thread each time a block of captured
// frames is available; forwards them to the connected consumer.
oboe::DataCallbackResult
AndroidInputDevice::onAudioReady(oboe::AudioStream *audioStream, void *audioData, int32_t numFrames)
{
std::unique_lock<std::mutex> l(mMutex);
// Send data to AudioPair
if (mConnection)
mConnection->onMicData(getFormat(), audioData, numFrames);
return oboe::DataCallbackResult::Continue;
}

// Mono stream at the device's native sample rate (set in open()).
Format AndroidInputDevice::getFormat()
{
return Format(mDeviceRate, 1);
}

// True while the stream is opened and started.
bool AndroidInputDevice::active() const
{
return mActive;
}

// Fake (silence-generating) mode is not supported by this backend.
bool AndroidInputDevice::fakeMode()
{
return false;
}

void AndroidInputDevice::setFakeMode(bool fakemode)
{}

// Pull-style reads are not supported - audio is pushed via onAudioReady().
int AndroidInputDevice::readBuffer(void* buffer)
{
throw std::runtime_error("AndroidInputDevice::readBuffer() is not implemented.");
}
// ------------ AndroidOutputDevice -----------------
// devId is ignored: Oboe picks the physical output device itself.
AndroidOutputDevice::AndroidOutputDevice(int devId)
{
ICELogDebug(<< "Creating AndroidOutputDevice. This is: " << StringHelper::toHex(this));
}

AndroidOutputDevice::~AndroidOutputDevice()
{
ICELogDebug(<< "Deleting AndroidOutputDevice.");
// Make sure the playback stream is stopped and freed.
close();
}
// Opens and starts a low-latency, exclusive, mono 16-bit Oboe output stream.
// Returns true when the stream is playing. On a failed requestStart() the
// partially opened stream is released again and false is returned.
bool AndroidOutputDevice::open()
{
  std::unique_lock<std::mutex> l(mMutex);
  if (mActive)
    return true;

  // Reset the playback statistics for this session.
  mRequestedFrames = 0;
  mStartTime = 0.0;
  mEndTime = 0.0;

  // The device chooses the sample rate; it is queried after openStream().
  oboe::AudioStreamBuilder builder;
  builder.setDirection(oboe::Direction::Output);
  builder.setPerformanceMode(oboe::PerformanceMode::LowLatency);
  builder.setSharingMode(oboe::SharingMode::Exclusive);
  builder.setFormat(oboe::AudioFormat::I16);
  builder.setChannelCount(oboe::ChannelCount::Mono);
  // builder.setDataCallback(this);
  builder.setCallback(this); // playback data is requested via onAudioReady()
  //builder.setErrorCallback(this)

  oboe::Result rescode = builder.openStream(&mPlayingStream);
  if (rescode != oboe::Result::OK)
    return false;

  mDeviceRate = mPlayingStream->getSampleRate();
  // Fixed log message: this is the output stream (was "Input Opened").
  ICELogInfo(<< "Output opened with rate " << mDeviceRate);
  mActive = true;
  rescode = mPlayingStream->requestStart();
  if (rescode != oboe::Result::OK)
  {
    // Fixed self-deadlock: close() locks mMutex itself and std::mutex is
    // not recursive, so release our lock before calling it.
    l.unlock();
    close();
    mActive = false;
  }
  return mActive;
}
// Stops and releases the playback stream, then logs how much audio the
// device requested versus elapsed wall-clock time (underrun diagnostics).
void AndroidOutputDevice::close()
{
std::unique_lock<std::mutex> l(mMutex);
if (!mActive)
return;
if (mPlayingStream != nullptr)
{
mPlayingStream->close();
// NOTE(review): manual delete assumes this Oboe API hands out a
// caller-owned raw stream - confirm against the Oboe version in libs/oboe.
delete mPlayingStream; mPlayingStream = nullptr;
}
mEndTime = now_ms();
mActive = false;
ICELogInfo(<< "For time " << mEndTime - mStartTime << " ms was requested "
<< float(mRequestedFrames) / getFormat().mRate * 1000 << " ms");
}
// Mono stream at the device's native sample rate (set in open()).
Format AndroidOutputDevice::getFormat()
{
return {mDeviceRate, 1};
}

// Fake (silence-generating) mode is not supported by this backend.
bool AndroidOutputDevice::fakeMode()
{
return false;
}

void AndroidOutputDevice::setFakeMode(bool /*fakemode*/)
{
}
// Oboe callback: runs on the audio thread whenever the device needs more
// playback data. Fills the buffer with silence first, then lets the
// connected producer (if any) overwrite it.
oboe::DataCallbackResult AndroidOutputDevice::onAudioReady(oboe::AudioStream *audioStream, void *audioData, int32_t numFrames)
{
if (mInShutdown)
return oboe::DataCallbackResult::Stop;

// The first callback marks the start of playback for the statistics.
if (mStartTime == 0.0)
mStartTime = now_ms();

// Ask producer about data
// Buffer is mono PCM16, hence numFrames * 2 bytes.
memset(audioData, 0, numFrames * 2);
if (mConnection)
{
Format f = getFormat();
if (f.mRate != 0)
mConnection->onSpkData(f, audioData, numFrames * 2);
}
mRequestedFrames += numFrames;
return oboe::DataCallbackResult::Continue;
}

// TODO - special case https://github.com/google/oboe/blob/master/docs/notes/disconnect.md
// Called by Oboe after an error already closed the stream (e.g. headset
// unplugged). Automatic restart is not implemented yet.
void AndroidOutputDevice::onErrorAfterClose(oboe::AudioStream *stream, oboe::Result result) {
if (result == oboe::Result::ErrorDisconnected) {
// LOGI("Restarting AudioStream after disconnect");
// soundEngine.restart(); // please check oboe samples for soundEngine.restart(); call
}
}
#endif // TARGET_ANDROID

View File

@ -0,0 +1,109 @@
/* Copyright(C) 2007-2017 VoIP objects (voipobjects.com)
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef __AUDIO_ANDROID_OBOE_H
#define __AUDIO_ANDROID_OBOE_H

#ifdef TARGET_ANDROID

#include "Audio_Interface.h"
#include "Audio_Helper.h"
#include "Audio_Resampler.h"
#include "Audio_DataWindow.h"
#include "../helper/HL_Pointer.h"
#include "../helper/HL_ByteBuffer.h"
#include "../helper/HL_Exception.h"
#include "../helper/HL_Statistics.h"

#include <memory>
#include <string>
#include <mutex>               // std::mutex members (was relying on transitive include)
#include <condition_variable>  // std::condition_variable member (was relying on transitive include)

#include "oboe/Oboe.h"

namespace Audio
{
  // Stub enumerator: Oboe chooses the physical device itself, so a single
  // placeholder device is exposed to the upper layers.
  class AndroidEnumerator: public Enumerator
  {
  public:
    AndroidEnumerator();
    ~AndroidEnumerator();

    void open(int direction);
    void close();
    int count();
    std::string nameAt(int index);
    int idAt(int index);
    int indexOfDefaultDevice();

  protected:
  };

  // Microphone capture via an Oboe input stream. Captured frames are pushed
  // to the connected consumer from the Oboe audio-thread callback.
  class AndroidInputDevice: public InputDevice, public oboe::AudioStreamCallback
  {
  public:
    AndroidInputDevice(int devId);
    ~AndroidInputDevice();

    bool open();
    void close();
    Format getFormat();
    bool fakeMode();
    void setFakeMode(bool fakemode);
    int readBuffer(void* buffer);
    bool active() const;

    // Oboe audio-thread callback delivering captured frames.
    oboe::DataCallbackResult
    onAudioReady(oboe::AudioStream *audioStream, void *audioData, int32_t numFrames);

  protected:
    bool mActive = false;
    oboe::AudioStream* mRecordingStream = nullptr;
    PResampler mResampler;
    DataWindow mDeviceRateCache, mSdkRateCache;
    int mDeviceRate = 0;          // Actual rate of opened recorder (fixed: was uninitialized)
    int mBufferSize = 0;          // Size of buffer used for recording, at native
                                  // sample rate (fixed: was uninitialized)
    DataWindow mRecorderBuffer;
    std::condition_variable mDataCondVar;
    int mRecorderBufferIndex = 0; // (fixed: was uninitialized)
    std::mutex mMutex;
  };

  // Speaker playback via an Oboe output stream. The Oboe callback pulls data
  // from the connected producer and tracks simple playback statistics.
  class AndroidOutputDevice: public OutputDevice, public oboe::AudioStreamCallback
  {
  public:
    AndroidOutputDevice(int devId);
    ~AndroidOutputDevice();

    bool open();
    void close();
    Format getFormat();
    bool fakeMode();
    void setFakeMode(bool fakemode);

    // Oboe audio-thread callback requesting playback data.
    oboe::DataCallbackResult onAudioReady(oboe::AudioStream *audioStream, void *audioData, int32_t numFrames);
    // Invoked by Oboe after an error already closed the stream
    // (e.g. device disconnect).
    void onErrorAfterClose(oboe::AudioStream *stream, oboe::Result result);

  protected:
    std::mutex mMutex;
    int mDeviceRate = 0;
    oboe::AudioStream* mPlayingStream = nullptr;
    DataWindow mPlayBuffer;
    int mBufferIndex = 0, mBufferSize = 0;
    bool mInShutdown = false;
    bool mActive = false;

    // Statistics.
    // NOTE(review): float timestamps lose millisecond precision after long
    // uptimes with now_ms(); consider double for mStartTime/mEndTime.
    float mRequestedFrames = 0.0, mStartTime = 0.0, mEndTime = 0.0;
  };
}

#endif // TARGET_ANDROID
#endif // __AUDIO_ANDROID_OBOE_H

View File

@ -56,17 +56,25 @@ void DataWindow::add(const void* data, int length)
if (length > mCapacity) if (length > mCapacity)
{ {
// Use latest bytes from data buffer in this case.
data = (char*)data + length - mCapacity; data = (char*)data + length - mCapacity;
length = mCapacity; length = mCapacity;
} }
// Check how much free space we have
int avail = mCapacity - mFilled; int avail = mCapacity - mFilled;
if (avail < length) if (avail < length)
{ {
memmove(mData, mData + length - avail, mFilled - (length - avail)); // Find the portion of data to move & save
mFilled -= length - avail; int delta = length - avail;
// Move the data
if (mFilled - delta > 0)
memmove(mData, mData + delta, mFilled - delta);
mFilled -= delta;
} }
memcpy(mData + mFilled, data, length); memcpy(mData + mFilled, data, length);
mFilled += length; mFilled += length;
} }

View File

@ -197,7 +197,7 @@ void DevicePair::onSpkData(const Format& f, void* buffer, int length)
// Resample these 10 milliseconds it to native format // Resample these 10 milliseconds it to native format
size_t wasProcessed = 0; size_t wasProcessed = 0;
size_t wasProduced = mSpkResampler.resample(AUDIO_SAMPLERATE, mOutput10msBuffer.data(), mOutput10msBuffer.capacity(), wasProcessed, f.mRate, size_t wasProduced = mSpkResampler.resample(nativeFormat.mRate, mOutput10msBuffer.data(), mOutput10msBuffer.capacity(), wasProcessed, f.mRate,
mOutputNativeData.mutableData() + mOutputNativeData.filled(), mOutputNativeData.capacity() - mOutputNativeData.filled()); mOutputNativeData.mutableData() + mOutputNativeData.filled(), mOutputNativeData.capacity() - mOutputNativeData.filled());
mOutputNativeData.setFilled(mOutputNativeData.filled() + wasProduced); mOutputNativeData.setFilled(mOutputNativeData.filled() + wasProduced);
#ifdef CONSOLE_LOGGING #ifdef CONSOLE_LOGGING
@ -206,7 +206,7 @@ void DevicePair::onSpkData(const Format& f, void* buffer, int length)
} }
} }
assert(mOutputNativeData.filled() >= length); // assert(mOutputNativeData.filled() >= length);
#ifdef DUMP_NATIVEOUTPUT #ifdef DUMP_NATIVEOUTPUT
if (mNativeOutputDump) if (mNativeOutputDump)
mNativeOutputDump->write(mOutputNativeData.data(), length); mNativeOutputDump->write(mOutputNativeData.data(), length);

View File

@ -146,7 +146,8 @@ OsEngine* OsEngine::instance()
#endif #endif
#ifdef TARGET_ANDROID #ifdef TARGET_ANDROID
return &OpenSLEngine::instance(); return nullptr; // As we use Oboe library for now
//return &OpenSLEngine::instance();
#endif #endif
return nullptr; return nullptr;

View File

@ -216,48 +216,52 @@ void Mixer::mix()
channelList[activeCounter++] = &mChannelList[i]; channelList[activeCounter++] = &mChannelList[i];
// No active channels - nothing to mix - exit // No active channels - nothing to mix - exit
if (!activeCounter) if (!activeCounter)
{ {
//ICELogDebug(<< "No active channel"); // ICELogDebug(<< "No active channel");
return; return;
} }
// Optimized versions for 1& 2 active channels // Optimized versions for 1& 2 active channels
if (activeCounter == 1) if (activeCounter == 1)
{ {
// Copy much samples as we have // Copy much samples as we have
Stream& audio = *channelList[0]; Stream& audio = *channelList[0];
mOutput.add(audio.data().data(), audio.data().filled());
audio.data().erase(audio.data().filled()); // Copy the decoded data
mOutput.add(audio.data().data(), audio.data().filled());
// Erase copied audio samples
audio.data().erase(audio.data().filled());
//ICELogSpecial(<<"Length of mixer stream " << audio.data().filled()); //ICELogSpecial(<<"Length of mixer stream " << audio.data().filled());
} }
else else
if (activeCounter == 2) if (activeCounter == 2)
{ {
Stream& audio1 = *channelList[0]; Stream& audio1 = *channelList[0];
Stream& audio2 = *channelList[1]; Stream& audio2 = *channelList[1];
int filled1 = audio1.data().filled() / 2, filled2 = audio2.data().filled() / 2; int filled1 = audio1.data().filled() / 2, filled2 = audio2.data().filled() / 2;
int available = filled1 > filled2 ? filled1 : filled2; int available = filled1 > filled2 ? filled1 : filled2;
// Find how much samples can be mixed // Find how much samples can be mixed
int filled = mOutput.filled() / 2; int filled = mOutput.filled() / 2;
int maxsize = mOutput.capacity() / 2; int maxsize = mOutput.capacity() / 2;
if (maxsize - filled < available) if (maxsize - filled < available)
available = maxsize - filled; available = maxsize - filled;
short sample = 0; short sample = 0;
for (int i=0; i<available; i++) for (int i=0; i<available; i++)
{ {
short sample1 = filled1 > i ? audio1.data().shortAt(i) : 0; short sample1 = filled1 > i ? audio1.data().shortAt(i) : 0;
short sample2 = filled2 > i ? audio2.data().shortAt(i) : 0; short sample2 = filled2 > i ? audio2.data().shortAt(i) : 0;
sample = (abs(sample1) > abs(sample2)) ? sample1 : sample2; sample = (abs(sample1) > abs(sample2)) ? sample1 : sample2;
mOutput.add(sample); mOutput.add(sample);
}
audio1.data().erase(available*2);
audio2.data().erase(available*2);
} }
audio1.data().erase(available*2);
audio2.data().erase(available*2);
}
else else
{ {
do do

View File

@ -1,14 +1,18 @@
/* Copyright(C) 2007-2014 VoIP objects (voipobjects.com) /* Copyright(C) 2007-2021 VoIP objects (voipobjects.com)
* This Source Code Form is subject to the terms of the Mozilla Public * This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "Audio_Player.h" #include "Audio_Player.h"
#include "../helper/HL_Log.h"
#define LOG_SUBSYSTEM "Player"
using namespace Audio; using namespace Audio;
// -------------- Player ----------- // -------------- Player -----------
Player::Player() Player::Player()
:mDelegate(NULL), mPlayedTime(0) :mDelegate(nullptr), mPlayedTime(0)
{ {
} }
@ -47,7 +51,7 @@ void Player::onMicData(const Format& f, const void* buffer, int length)
void Player::onSpkData(const Format& f, void* buffer, int length) void Player::onSpkData(const Format& f, void* buffer, int length)
{ {
Lock l(mGuard); Lock l(mGuard);
// Fill buffer by zero if player owns dedicated device // Fill buffer by zero if player owns dedicated device
if (mOutput) if (mOutput)
memset(buffer, 0, length); memset(buffer, 0, length);
@ -99,7 +103,7 @@ void Player::onFilePlayed()
void Player::obtain(int usage) void Player::obtain(int usage)
{ {
Lock l(mGuard); Lock l(mGuard);
UsageMap::iterator usageIter = mUsage.find(usage); auto usageIter = mUsage.find(usage);
if (usageIter == mUsage.end()) if (usageIter == mUsage.end())
mUsage[usage] = 1; mUsage[usage] = 1;
else else
@ -132,7 +136,7 @@ int Player::releasePlayed()
{ {
Lock l(mGuard); Lock l(mGuard);
int result = mFinishedUsages.size(); int result = mFinishedUsages.size();
while (mFinishedUsages.size()) while (!mFinishedUsages.empty())
{ {
release(mFinishedUsages.front()); release(mFinishedUsages.front());
mFinishedUsages.erase(mFinishedUsages.begin()); mFinishedUsages.erase(mFinishedUsages.begin());

View File

@ -1,4 +1,4 @@
/* Copyright(C) 2007-2014 VoIP objects (voipobjects.com) /* Copyright(C) 2007-2021 VoIP objects (voipobjects.com)
* This Source Code Form is subject to the terms of the Mozilla Public * This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
@ -8,6 +8,7 @@
#include "../helper/HL_Log.h" #include "../helper/HL_Log.h"
#include "../helper/HL_Sync.h" #include "../helper/HL_Sync.h"
#include "../helper/HL_Statistics.h"
#include "Audio_Interface.h" #include "Audio_Interface.h"
#include <deque> #include <deque>
#include <map> #include <map>
@ -48,15 +49,18 @@ namespace Audio
void onMicData(const Format& f, const void* buffer, int length); void onMicData(const Format& f, const void* buffer, int length);
void onSpkData(const Format& f, void* buffer, int length); void onSpkData(const Format& f, void* buffer, int length);
void onFilePlayed(); void onFilePlayed();
void scheduleRelease();
void obtain(int usageId); void obtain(int usageId);
public: public:
Player(); Player();
~Player(); ~Player();
void setDelegate(EndOfAudioDelegate* d); void setDelegate(EndOfAudioDelegate* d);
EndOfAudioDelegate* getDelegate() const; EndOfAudioDelegate* getDelegate() const;
void setOutput(POutputDevice output); void setOutput(POutputDevice output);
POutputDevice getOutput() const; POutputDevice getOutput() const;
void add(int usageId, PWavFileReader file, bool loop, int timelength); void add(int usageId, PWavFileReader file, bool loop, int timelength);
void release(int usageId); void release(int usageId);
void clear(); void clear();

View File

@ -17,7 +17,7 @@
#define AUDIO_CHANNELS 1 #define AUDIO_CHANNELS 1
// Samplerate must be 8 / 16 / 24 / 32 / 48 KHz // Samplerate must be 8 / 16 / 24 / 32 / 48 KHz
#define AUDIO_SAMPLERATE 16000 #define AUDIO_SAMPLERATE 8000
#define AUDIO_MIC_BUFFER_COUNT 16 #define AUDIO_MIC_BUFFER_COUNT 16
#define AUDIO_MIC_BUFFER_LENGTH 10 #define AUDIO_MIC_BUFFER_LENGTH 10
#define AUDIO_MIC_BUFFER_SIZE (AUDIO_MIC_BUFFER_LENGTH * AUDIO_SAMPLERATE / 1000 * 2 * AUDIO_CHANNELS) #define AUDIO_MIC_BUFFER_SIZE (AUDIO_MIC_BUFFER_LENGTH * AUDIO_SAMPLERATE / 1000 * 2 * AUDIO_CHANNELS)
@ -50,7 +50,7 @@
#define MT_MAXRTPPACKET 1500 #define MT_MAXRTPPACKET 1500
#define MT_DTMF_END_PACKETS 3 #define MT_DTMF_END_PACKETS 3
#define RTP_BUFFER_HIGH 480 #define RTP_BUFFER_HIGH 24480
#define RTP_BUFFER_LOW 10 #define RTP_BUFFER_LOW 10
#define RTP_BUFFER_PREBUFFER 80 #define RTP_BUFFER_PREBUFFER 80
#define RTP_DECODED_CAPACITY 2048 #define RTP_DECODED_CAPACITY 2048
@ -66,15 +66,15 @@
// AMR codec defines - it requires USE_AMR_CODEC defined // AMR codec defines - it requires USE_AMR_CODEC defined
// #define USE_AMR_CODEC // #define USE_AMR_CODEC
#define MT_AMRNB_PAYLOADTYPE 122 #define MT_AMRNB_PAYLOADTYPE 112
#define MT_AMRNB_CODECNAME "amr" #define MT_AMRNB_CODECNAME "amr"
#define MT_AMRNB_OCTET_PAYLOADTYPE 123 #define MT_AMRNB_OCTET_PAYLOADTYPE 113
#define MT_AMRWB_PAYLOADTYPE 124 #define MT_AMRWB_PAYLOADTYPE 96
#define MT_AMRWB_CODECNAME "amr-wb" #define MT_AMRWB_CODECNAME "amr-wb"
#define MT_AMRWB_OCTET_PAYLOADTYPE 125 #define MT_AMRWB_OCTET_PAYLOADTYPE 97
#define MT_GSMEFR_PAYLOADTYPE 126 #define MT_GSMEFR_PAYLOADTYPE 126
#define MT_GSMEFR_CODECNAME "GERAN-EFR" #define MT_GSMEFR_CODECNAME "GERAN-EFR"

View File

@ -28,19 +28,19 @@ public:
virtual ~AudioProvider(); virtual ~AudioProvider();
// Returns provider RTP name // Returns provider RTP name
std::string streamName(); std::string streamName() override;
// Returns provider RTP profile name // Returns provider RTP profile name
std::string streamProfile(); std::string streamProfile() override;
// Sets destination IP address // Sets destination IP address
void setDestinationAddress(const RtpPair<InternetAddress>& addr); void setDestinationAddress(const RtpPair<InternetAddress>& addr) override;
// Processes incoming data // Processes incoming data
void processData(PDatagramSocket s, const void* dataBuffer, int dataSize, InternetAddress& source); void processData(PDatagramSocket s, const void* dataBuffer, int dataSize, InternetAddress& source) override;
// This method is called by user agent to send ICE packet from mediasocket // This method is called by user agent to send ICE packet from mediasocket
void sendData(PDatagramSocket s, InternetAddress& destination, const void* dataBuffer, unsigned int datasize); void sendData(PDatagramSocket s, InternetAddress& destination, const void* dataBuffer, unsigned int datasize) override;
// Updates SDP offer // Updates SDP offer
void updateSdpOffer(resip::SdpContents::Session::Medium& sdp, SdpDirection direction) override; void updateSdpOffer(resip::SdpContents::Session::Medium& sdp, SdpDirection direction) override;

View File

@ -93,7 +93,7 @@ void UserAgent::start()
} }
// Initialize resip loggег // Initialize resip loggег
resip::Log::initialize(resip::Log::OnlyExternal, resip::Log::Info, "Client", *this); resip::Log::initialize(resip::Log::OnlyExternal, resip::Log::Debug, "Client", *this);
// Build list of nameservers if specified // Build list of nameservers if specified
resip::DnsStub::NameserverList nslist; resip::DnsStub::NameserverList nslist;
@ -151,7 +151,7 @@ void UserAgent::start()
switch (mConfig[CONFIG_TRANSPORT].asInt()) switch (mConfig[CONFIG_TRANSPORT].asInt())
{ {
case 0: case TransportType_Any:
if (mConfig[CONFIG_IPV4].asBool()) if (mConfig[CONFIG_IPV4].asBool())
{ {
ADD_TRANSPORT4(resip::TCP) ADD_TRANSPORT4(resip::TCP)
@ -166,21 +166,21 @@ void UserAgent::start()
} }
break; break;
case 1: case TransportType_Udp:
if (mConfig[CONFIG_IPV4].asBool()) if (mConfig[CONFIG_IPV4].asBool())
ADD_TRANSPORT4(resip::UDP); ADD_TRANSPORT4(resip::UDP);
if (mConfig[CONFIG_IPV6].asBool()) if (mConfig[CONFIG_IPV6].asBool())
ADD_TRANSPORT6(resip::UDP); ADD_TRANSPORT6(resip::UDP);
break; break;
case 2: case TransportType_Tcp:
if (mConfig[CONFIG_IPV4].asBool()) if (mConfig[CONFIG_IPV4].asBool())
ADD_TRANSPORT4(resip::TCP); ADD_TRANSPORT4(resip::TCP);
if (mConfig[CONFIG_IPV6].asBool()) if (mConfig[CONFIG_IPV6].asBool())
ADD_TRANSPORT6(resip::TCP); ADD_TRANSPORT6(resip::TCP);
break; break;
case 3: case TransportType_Tls:
if (mConfig[CONFIG_IPV4].asBool()) if (mConfig[CONFIG_IPV4].asBool())
ADD_TRANSPORT4(resip::TLS); ADD_TRANSPORT4(resip::TLS);
if (mConfig[CONFIG_IPV6].asBool()) if (mConfig[CONFIG_IPV6].asBool())

View File

@ -60,6 +60,14 @@
#define RESIPROCATE_SUBSYSTEM Subsystem::TEST #define RESIPROCATE_SUBSYSTEM Subsystem::TEST
using namespace std; using namespace std;
// SIP transport selection; the chosen value is stored in
// config()[CONFIG_TRANSPORT] and switched on in UserAgent::start().
enum
{
TransportType_Any, // no preference - let the stack register several transports
TransportType_Udp,
TransportType_Tcp,
TransportType_Tls
};
enum enum
{ {
CONFIG_IPV4 = 0, // Use IP4 CONFIG_IPV4 = 0, // Use IP4

View File

@ -25,10 +25,15 @@ NetworkFrame::PacketData NetworkFrame::GetUdpPayloadForEthernet(NetworkFrame::Pa
uint16_t proto = 0; uint16_t proto = 0;
if (ethernet->mEtherType == 129) if (ethernet->mEtherType == 129)
{ {
const VlanHeader* vlan = reinterpret_cast<const VlanHeader*>(packet.mData); // Skip 1 or more VLAN headers
packet.mData += sizeof(VlanHeader); do
packet.mLength -= sizeof(VlanHeader); {
proto = ntohs(vlan->mData); const VlanHeader* vlan = reinterpret_cast<const VlanHeader*>(packet.mData);
packet.mData += sizeof(VlanHeader);
packet.mLength -= sizeof(VlanHeader);
proto = ntohs(vlan->mData);
}
while (proto == 0x8100);
} }
// Skip MPLS headers // Skip MPLS headers

View File

@ -245,6 +245,17 @@ std::string MediaStreamId::getFinishDescription() const
return oss.str(); return oss.str();
} }
// Explicit memberwise copy assignment.
// NOTE(review): presumably equivalent to the compiler-generated operator;
// if MediaStreamId ever gains a field this list must be kept in sync.
MediaStreamId& MediaStreamId::operator = (const MediaStreamId& src)
{
this->mDestination = src.mDestination;
this->mSource = src.mSource;
this->mLinkId = src.mLinkId;
this->mSSRC = src.mSSRC;
this->mSsrcIsId = src.mSsrcIsId;
return *this;
}
std::ostream& operator << (std::ostream& output, const MediaStreamId& id) std::ostream& operator << (std::ostream& output, const MediaStreamId& id)
{ {
return (output << id.toString()); return (output << id.toString());

View File

@ -85,6 +85,7 @@ struct MediaStreamId
std::string toString() const; std::string toString() const;
std::string getDetectDescription() const; std::string getDetectDescription() const;
std::string getFinishDescription() const; std::string getFinishDescription() const;
MediaStreamId& operator = (const MediaStreamId& src);
}; };
std::ostream& operator << (std::ostream& output, const MediaStreamId& id); std::ostream& operator << (std::ostream& output, const MediaStreamId& id);

View File

@ -0,0 +1 @@

View File

@ -0,0 +1,82 @@
#ifndef __HELPER_STATISTICS_H
#define __HELPER_STATISTICS_H
// Running arithmetic-mean accumulator.
// Feed samples via process(); read the mean via average() or value().
template<typename T>
struct Average
{
  int mCount = 0; // number of samples processed so far
  T mSum = 0;     // running sum of all samples

  // Arithmetic mean of all processed samples; 0 when no samples yet.
  T average() const
  {
    if (!mCount)
      return 0;
    return mSum / mCount;
  }

  // Alias for average(), kept for API symmetry with TestResult.
  T value() const
  {
    return average();
  }

  // Accumulate one sample.
  void process(T value)
  {
    mCount++;
    mSum += value;
  }
};

// Tracks minimum / maximum / average / most recent value of a measured
// quantity. The minimum/maximum template parameters seed mMin/mMax so the
// first process() call normally overwrites them; default_value seeds
// mCurrent before any sample arrives.
template<typename T, int minimum = 100000, int maximum = 0, int default_value = 0>
struct TestResult
{
  T mMin = minimum;           // smallest sample seen (seed until first sample)
  T mMax = maximum;           // largest sample seen (seed until first sample)
  Average<T> mAverage;        // running mean of all samples
  T mCurrent = default_value; // most recent sample

  // Fold one sample into min / max / current / average.
  void process(T value)
  {
    if (mMin > value)
      mMin = value;
    if (mMax < value)
      mMax = value;
    mCurrent = value;
    mAverage.process(value);
  }

  // True once at least one sample has been processed.
  bool is_initialized() const
  {
    return mAverage.mCount > 0;
  }

  // Most recent sample, or 0 before the first sample.
  // NOTE: this may differ from the default_value seed, which is only
  // observable through operator T() / mCurrent.
  T current() const
  {
    if (is_initialized())
      return mCurrent;
    else
      return 0;
  }

  T value() const
  {
    return current();
  }

  T average() const
  {
    return mAverage.average();
  }

  // Assignment shorthand for process().
  // Fixed: return the injected class name instead of TestResult<T>&, which
  // named the default specialization TestResult<T, 100000, 0, 0> and broke
  // compilation for any instantiation with non-default template arguments.
  TestResult& operator = (T value)
  {
    process(value);
    return *this;
  }

  // Implicit read of the raw last sample (returns the default_value seed
  // before the first sample, unlike current()). Now const-qualified.
  operator T() const
  {
    return mCurrent;
  }
};
#endif

View File

@ -0,0 +1,10 @@
#include "HL_Time.h"
#include <time.h>
// Current monotonic time in milliseconds. Uses CLOCK_MONOTONIC, which is
// immune to wall-clock adjustments, so differences between two calls are
// safe for measuring intervals.
double now_ms(void) {
    struct timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);
    // seconds -> ms, plus the nanosecond remainder -> ms
    return (double) ts.tv_sec * 1000.0 + (double) ts.tv_nsec / 1e6;
}

View File

@ -0,0 +1,6 @@
#ifndef __HELPER_TIME_H
#define __HELPER_TIME_H
// Monotonic timestamp in milliseconds (implemented in HL_Time.cpp via
// CLOCK_MONOTONIC); suitable for interval measurement, not wall-clock time.
extern double now_ms();
#endif

View File

@ -10,6 +10,7 @@
#include "MT_AudioCodec.h" #include "MT_AudioCodec.h"
#include "MT_CngHelper.h" #include "MT_CngHelper.h"
#include "../helper/HL_Log.h" #include "../helper/HL_Log.h"
#include "../helper/HL_Time.h"
#include "../audio/Audio_Interface.h" #include "../audio/Audio_Interface.h"
#include "../audio/Audio_Resampler.h" #include "../audio/Audio_Resampler.h"
#include <cmath> #include <cmath>
@ -47,11 +48,19 @@ int RtpBuffer::Packet::rate() const
return mRate; return mRate;
} }
// Decoded PCM samples carried by this packet (read-only view).
const std::vector<short>& RtpBuffer::Packet::pcm() const
{
return mPcm;
}

// Decoded PCM samples carried by this packet (mutable view).
std::vector<short>& RtpBuffer::Packet::pcm()
{
return mPcm;
}
// ------------ RtpBuffer ---------------- // ------------ RtpBuffer ----------------
RtpBuffer::RtpBuffer(Statistics& stat) RtpBuffer::RtpBuffer(Statistics& stat)
:mStat(stat), mSsrc(0), mHigh(RTP_BUFFER_HIGH), mLow(RTP_BUFFER_LOW), mPrebuffer(RTP_BUFFER_PREBUFFER), :mStat(stat)
mFirstPacketWillGo(true), mReturnedCounter(0), mAddCounter(0),
mFetchedPacket(std::shared_ptr<RTPPacket>(), 0, 0)
{ {
} }
@ -96,19 +105,28 @@ int RtpBuffer::getCount() const
return static_cast<int>(mPacketList.size()); return static_cast<int>(mPacketList.size());
} }
bool SequenceSort(const RtpBuffer::Packet& p1, const RtpBuffer::Packet& p2) bool SequenceSort(const std::shared_ptr<RtpBuffer::Packet>& p1, const std::shared_ptr<RtpBuffer::Packet>& p2)
{ {
return p1.rtp()->GetExtendedSequenceNumber() < p2.rtp()->GetExtendedSequenceNumber(); return p1->rtp()->GetExtendedSequenceNumber() < p2->rtp()->GetExtendedSequenceNumber();
} }
bool RtpBuffer::add(std::shared_ptr<jrtplib::RTPPacket> packet, int timelength, int rate) std::shared_ptr<RtpBuffer::Packet> RtpBuffer::add(std::shared_ptr<jrtplib::RTPPacket> packet, int timelength, int rate)
{ {
if (!packet) if (!packet)
return false; return std::shared_ptr<Packet>();
Lock l(mGuard); Lock l(mGuard);
// Update statistics // Update statistics
if (mLastAddTime == 0.0)
mLastAddTime = now_ms();
else
{
float t = now_ms();
mStat.mPacketInterval.process(t - mLastAddTime);
mLastAddTime = t;
}
mStat.mSsrc = static_cast<uint16_t>(packet->GetSSRC()); mStat.mSsrc = static_cast<uint16_t>(packet->GetSSRC());
// Update jitter // Update jitter
@ -121,15 +139,15 @@ bool RtpBuffer::add(std::shared_ptr<jrtplib::RTPPacket> packet, int timelength,
// New sequence number // New sequence number
unsigned newSeqno = packet->GetExtendedSequenceNumber(); unsigned newSeqno = packet->GetExtendedSequenceNumber();
for (Packet& p: mPacketList) for (std::shared_ptr<Packet>& p: mPacketList)
{ {
unsigned seqno = p.rtp()->GetExtendedSequenceNumber(); unsigned seqno = p->rtp()->GetExtendedSequenceNumber();
if (seqno == newSeqno) if (seqno == newSeqno)
{ {
mStat.mDuplicatedRtp++; mStat.mDuplicatedRtp++;
ICELogMedia(<< "Discovered duplicated packet, skipping"); ICELogMedia(<< "Discovered duplicated packet, skipping");
return false; return std::shared_ptr<Packet>();
} }
if (seqno > maxno) if (seqno > maxno)
@ -143,8 +161,11 @@ bool RtpBuffer::add(std::shared_ptr<jrtplib::RTPPacket> packet, int timelength,
if (newSeqno > minno || (available < mHigh)) if (newSeqno > minno || (available < mHigh))
{ {
Packet p(packet, timelength, rate); // Insert into queue
auto p = std::make_shared<Packet>(packet, timelength, rate);
mPacketList.push_back(p); mPacketList.push_back(p);
// Sort again
std::sort(mPacketList.begin(), mPacketList.end(), SequenceSort); std::sort(mPacketList.begin(), mPacketList.end(), SequenceSort);
// Limit by max timelength // Limit by max timelength
@ -152,21 +173,18 @@ bool RtpBuffer::add(std::shared_ptr<jrtplib::RTPPacket> packet, int timelength,
if (available > mHigh) if (available > mHigh)
ICELogMedia(<< "Available " << available << "ms with limit " << mHigh << "ms"); ICELogMedia(<< "Available " << available << "ms with limit " << mHigh << "ms");
/*while (available > mHigh && mPacketList.size())
{ return p;
ICELogDebug( << "Dropping RTP packet from jitter buffer");
available -= mPacketList.front().timelength();
mPacketList.erase(mPacketList.begin());
}*/
} }
else else
{ {
ICELogMedia(<< "Too old packet, skipping"); ICELogMedia(<< "Too old packet, skipping");
mStat.mOldRtp++; mStat.mOldRtp++;
return false;
return std::shared_ptr<Packet>();
} }
return true; return std::shared_ptr<Packet>();
} }
RtpBuffer::FetchResult RtpBuffer::fetch(ResultList& rl) RtpBuffer::FetchResult RtpBuffer::fetch(ResultList& rl)
@ -182,7 +200,7 @@ RtpBuffer::FetchResult RtpBuffer::fetch(ResultList& rl)
while (total > mHigh && mPacketList.size()) while (total > mHigh && mPacketList.size())
{ {
ICELogMedia( << "Dropping RTP packets from jitter buffer"); ICELogMedia( << "Dropping RTP packets from jitter buffer");
total -= mPacketList.front().timelength(); total -= mPacketList.front()->timelength();
// Save it as last packet however - to not confuse loss packet counter // Save it as last packet however - to not confuse loss packet counter
mFetchedPacket = mPacketList.front(); mFetchedPacket = mPacketList.front();
@ -198,7 +216,11 @@ RtpBuffer::FetchResult RtpBuffer::fetch(ResultList& rl)
result = FetchResult::NoPacket; result = FetchResult::NoPacket;
else else
{ {
if (mFetchedPacket.rtp()) bool is_fetched_packet = mFetchedPacket.get() != nullptr;
if (is_fetched_packet)
is_fetched_packet &= mFetchedPacket->rtp().get() != nullptr;
if (is_fetched_packet)
{ {
if (mPacketList.empty()) if (mPacketList.empty())
{ {
@ -208,10 +230,10 @@ RtpBuffer::FetchResult RtpBuffer::fetch(ResultList& rl)
else else
{ {
// Current sequence number ? // Current sequence number ?
unsigned seqno = mPacketList.front().rtp()->GetExtendedSequenceNumber(); unsigned seqno = mPacketList.front()->rtp()->GetExtendedSequenceNumber();
// Gap between new packet and previous on // Gap between new packet and previous on
int gap = seqno - mFetchedPacket.rtp()->GetSequenceNumber() - 1; int gap = seqno - mFetchedPacket->rtp()->GetSequenceNumber() - 1;
gap = std::min(gap, 127); gap = std::min(gap, 127);
if (gap > 0 && mPacketList.empty()) if (gap > 0 && mPacketList.empty())
{ {
@ -228,29 +250,7 @@ RtpBuffer::FetchResult RtpBuffer::fetch(ResultList& rl)
} }
result = FetchResult::RegularPacket; result = FetchResult::RegularPacket;
Packet& p = mPacketList.front(); rl.push_back(mPacketList.front());
rl.push_back(p.rtp());
// Maybe it is time to replay packet right now ? For case of AMR SID packets
/*if (mFetchedPacket.rtp() && gap == 0 && mFetchedPacket.timelength() >= 10 && p.timelength() >= 10)
{
int timestampDelta;
// Timestamp difference
if (p.rtp()->GetTimestamp() > mFetchedPacket.rtp()->GetTimestamp())
timestampDelta = TimeHelper::getDelta(p.rtp()->GetTimestamp(), mFetchedPacket.rtp()->GetTimestamp());
else
timestampDelta = TimeHelper::getDelta(mFetchedPacket.rtp()->GetTimestamp(), p.rtp()->GetTimestamp());
// Timestamp units per packet
int nrOfPackets = timestampDelta / (p.timelength() * (p.rate() / 1000));
// Add more copies of SID (most probably) packets
for (int i = 0; i < nrOfPackets - 1; i++)
{
//assert(false);
rl.push_back(p.rtp());
}
}*/
// Save last returned normal packet // Save last returned normal packet
mFetchedPacket = mPacketList.front(); mFetchedPacket = mPacketList.front();
@ -269,7 +269,7 @@ RtpBuffer::FetchResult RtpBuffer::fetch(ResultList& rl)
result = FetchResult::RegularPacket; result = FetchResult::RegularPacket;
// Put it to output list // Put it to output list
rl.push_back(mPacketList.front().rtp()); rl.push_back(mPacketList.front());
// Remember returned packet // Remember returned packet
mFetchedPacket = mPacketList.front(); mFetchedPacket = mPacketList.front();
@ -295,7 +295,7 @@ int RtpBuffer::findTimelength()
{ {
int available = 0; int available = 0;
for (unsigned i = 0; i < mPacketList.size(); i++) for (unsigned i = 0; i < mPacketList.size(); i++)
available += mPacketList[i].timelength(); available += mPacketList[i]->timelength();
return available; return available;
} }
@ -321,7 +321,7 @@ Receiver::~Receiver()
//-------------- AudioReceiver ---------------- //-------------- AudioReceiver ----------------
AudioReceiver::AudioReceiver(const CodecList::Settings& settings, MT::Statistics &stat) AudioReceiver::AudioReceiver(const CodecList::Settings& settings, MT::Statistics &stat)
:Receiver(stat), mBuffer(stat), mFrameCount(0), mFailedCount(0), mCodecSettings(settings), :Receiver(stat), mBuffer(stat), mCodecSettings(settings),
mCodecList(settings) mCodecList(settings)
{ {
// Init resamplers // Init resamplers
@ -354,9 +354,45 @@ AudioReceiver::~AudioReceiver()
mDecodedDump.reset(); mDecodedDump.reset();
} }
size_t decode_packet(Codec& codec, RTPPacket& p, void* output_buffer, size_t output_capacity)
{
// How much data was produced
size_t result = 0;
// Handle here regular RTP packets
// Check if payload length is ok
int tail = codec.rtpLength() ? p.GetPayloadLength() % codec.rtpLength() : 0;
if (!tail)
{
// Find number of frames
int frame_count = codec.rtpLength() ? p.GetPayloadLength() / codec.rtpLength() : 1;
int frame_length = codec.rtpLength() ? codec.rtpLength() : (int)p.GetPayloadLength();
// Save last packet time length
// mLastPacketTimeLength = mFrameCount * mCodec->frameTime();
// Decode
for (int i=0; i < frame_count; i++)
{
auto decoded_length = codec.decode(p.GetPayloadData() + i * codec.rtpLength(),
frame_length,
output_buffer,
output_capacity);
result += decoded_length;
}
}
else
ICELogMedia(<< "RTP packet with tail.");
return result;
}
bool AudioReceiver::add(const std::shared_ptr<jrtplib::RTPPacket>& p, Codec** codec) bool AudioReceiver::add(const std::shared_ptr<jrtplib::RTPPacket>& p, Codec** codec)
{ {
// ICELogInfo(<< "Adding packet No " << p->GetSequenceNumber());
// Increase codec counter // Increase codec counter
mStat.mCodecCount[p->GetPayloadType()]++; mStat.mCodecCount[p->GetPayloadType()]++;
@ -410,7 +446,22 @@ bool AudioReceiver::add(const std::shared_ptr<jrtplib::RTPPacket>& p, Codec** co
} }
// Queue packet to buffer // Queue packet to buffer
return mBuffer.add(p, time_length, codecIter->second->samplerate()); auto packet = mBuffer.add(p, time_length, codecIter->second->samplerate()).get();
if (packet)
{
// Check if early decoding configured
if (mEarlyDecode && *codec)
{
// Move data to packet buffer
size_t available = decode_packet(**codec, *p, mDecodedFrame, sizeof mDecodedFrame);
packet->pcm().resize(available / 2);
memcpy(packet->pcm().data(), mDecodedFrame, available / 2);
}
return true;
}
else
return false;
} }
void AudioReceiver::processDecoded(Audio::DataWindow& output, int options) void AudioReceiver::processDecoded(Audio::DataWindow& output, int options)
@ -435,7 +486,7 @@ void AudioReceiver::processDecoded(Audio::DataWindow& output, int options)
bool AudioReceiver::getAudio(Audio::DataWindow& output, int options, int* rate) bool AudioReceiver::getAudio(Audio::DataWindow& output, int options, int* rate)
{ {
bool result = false; bool result = false, /*had_cng = false, */had_decode = false;
// Get next packet from buffer // Get next packet from buffer
RtpBuffer::ResultList rl; RtpBuffer::ResultList rl;
@ -443,7 +494,7 @@ bool AudioReceiver::getAudio(Audio::DataWindow& output, int options, int* rate)
switch (fr) switch (fr)
{ {
case RtpBuffer::FetchResult::Gap: case RtpBuffer::FetchResult::Gap:
ICELogInfo(<< "Gap detected."); ICELogDebug(<< "Gap detected.");
mDecodedLength = mResampledLength = 0; mDecodedLength = mResampledLength = 0;
if (mCngPacket && mCodec) if (mCngPacket && mCodec)
@ -482,14 +533,14 @@ bool AudioReceiver::getAudio(Audio::DataWindow& output, int options, int* rate)
case RtpBuffer::FetchResult::RegularPacket: case RtpBuffer::FetchResult::RegularPacket:
mFailedCount = 0; mFailedCount = 0;
for (std::shared_ptr<RTPPacket>& p: rl) for (std::shared_ptr<RtpBuffer::Packet>& p: rl)
{ {
assert(p); assert(p);
// Check if previously CNG packet was detected. Emit CNG audio here if needed. // Check if previously CNG packet was detected. Emit CNG audio here if needed.
if (options & DecodeOptions_FillCngGap && mCngPacket && mCodec) if (options & DecodeOptions_FillCngGap && mCngPacket && mCodec)
{ {
// Fill CNG audio is server mode is present // Fill CNG audio is server mode is present
int units = p->GetTimestamp() - mCngPacket->GetTimestamp(); int units = p->rtp()->GetTimestamp() - mCngPacket->GetTimestamp();
int milliseconds = units / (mCodec->samplerate() / 1000); int milliseconds = units / (mCodec->samplerate() / 1000);
if (milliseconds > mLastPacketTimeLength) if (milliseconds > mLastPacketTimeLength)
{ {
@ -522,69 +573,83 @@ bool AudioReceiver::getAudio(Audio::DataWindow& output, int options, int* rate)
} }
} }
// Find codec if (mEarlyDecode)
mCodec = mCodecMap[p->GetPayloadType()];
if (mCodec)
{ {
if (rate) // ToDo - copy the decoded data to output buffer
*rate = mCodec->samplerate();
// Check if it is CNG packet }
if ((p->GetPayloadType() == 0 || p->GetPayloadType() == 8) && p->GetPayloadLength() >= 1 && p->GetPayloadLength() <= 6) else
{
// Find codec by payload type
int ptype = p->rtp()->GetPayloadType();
mCodec = mCodecMap[ptype];
if (mCodec)
{ {
if (options & DecodeOptions_SkipDecode) if (rate)
mDecodedLength = 0; *rate = mCodec->samplerate();
else
// Check if it is CNG packet
if ((ptype == 0 || ptype == 8) && p->rtp()->GetPayloadLength() >= 1 && p->rtp()->GetPayloadLength() <= 6)
{ {
mCngPacket = p; if (options & DecodeOptions_SkipDecode)
mCngDecoder.decode3389(p->GetPayloadData(), p->GetPayloadLength()); mDecodedLength = 0;
// Emit CNG mLastPacketLength milliseconds else
mDecodedLength = mCngDecoder.produce(mCodec->samplerate(), mLastPacketTimeLength,
(short*)mDecodedFrame, true);
if (mDecodedLength)
processDecoded(output, options);
}
result = true;
}
else
{
// Reset CNG packet
mCngPacket.reset();
// Handle here regular RTP packets
// Check if payload length is ok
int tail = mCodec->rtpLength() ? p->GetPayloadLength() % mCodec->rtpLength() : 0;
if (!tail)
{
// Find number of frames
mFrameCount = mCodec->rtpLength() ? p->GetPayloadLength() / mCodec->rtpLength() : 1;
int frameLength = mCodec->rtpLength() ? mCodec->rtpLength() : (int)p->GetPayloadLength();
// Save last packet time length
mLastPacketTimeLength = mFrameCount * mCodec->frameTime();
// Decode
for (int i=0; i<mFrameCount && !mCodecSettings.mSkipDecode; i++)
{ {
if (options & DecodeOptions_SkipDecode) mCngPacket = p->rtp();
mDecodedLength = 0; mCngDecoder.decode3389(p->rtp()->GetPayloadData(), p->rtp()->GetPayloadLength());
else // Emit CNG mLastPacketLength milliseconds
{ mDecodedLength = mCngDecoder.produce(mCodec->samplerate(), mLastPacketTimeLength,
// Decode frame by frame (short*)mDecodedFrame, true);
mDecodedLength = mCodec->decode(p->GetPayloadData() + i*mCodec->rtpLength(), if (mDecodedLength)
frameLength, mDecodedFrame, sizeof mDecodedFrame); processDecoded(output, options);
if (mDecodedLength)
processDecoded(output, options);
}
} }
result = mFrameCount > 0; result = true;
// Check for bitrate counter
processStatisticsWithAmrCodec(mCodec.get());
} }
else else
ICELogMedia(<< "RTP packet with tail."); {
// Reset CNG packet
mCngPacket.reset();
// Handle here regular RTP packets
// Check if payload length is ok
int tail = mCodec->rtpLength() ? p->rtp()->GetPayloadLength() % mCodec->rtpLength() : 0;
if (!tail)
{
// Find number of frames
mFrameCount = mCodec->rtpLength() ? p->rtp()->GetPayloadLength() / mCodec->rtpLength() : 1;
int frameLength = mCodec->rtpLength() ? mCodec->rtpLength() : (int)p->rtp()->GetPayloadLength();
// Save last packet time length
mLastPacketTimeLength = mFrameCount * mCodec->frameTime();
// Decode
for (int i=0; i<mFrameCount && !mCodecSettings.mSkipDecode; i++)
{
if (options & DecodeOptions_SkipDecode)
mDecodedLength = 0;
else
{
// Trigger the statistics
had_decode = true;
// Decode frame by frame
mDecodedLength = mCodec->decode(p->rtp()->GetPayloadData() + i * mCodec->rtpLength(),
frameLength, mDecodedFrame, sizeof mDecodedFrame);
// mDecodedLength = 3840; // Opus 20 ms stereo
if (mDecodedLength)
processDecoded(output, options);
}
}
result = mFrameCount > 0;
// Check for bitrate counter
processStatisticsWithAmrCodec(mCodec.get());
}
else
ICELogMedia(<< "RTP packet with tail.");
}
} }
} }
} }
@ -594,6 +659,18 @@ bool AudioReceiver::getAudio(Audio::DataWindow& output, int options, int* rate)
assert(0); assert(0);
} }
if (had_decode)
{
// mStat.mDecodeRequested++;
if (mLastDecodeTime == 0.0)
mLastDecodeTime = now_ms();
else
{
float t = now_ms();
mStat.mDecodingInterval.process(t - mLastDecodeTime);
mLastDecodeTime = t;
}
}
return result; return result;
} }
@ -670,12 +747,12 @@ void AudioReceiver::updatePvqa(const void *data, int size)
mPvqaBuffer->addZero(size); mPvqaBuffer->addZero(size);
Audio::Format fmt; Audio::Format fmt;
int frames = (int)fmt.timeFromSize(mPvqaBuffer->filled()) / (PVQA_INTERVAL * 1000); int frames = static_cast<int>(fmt.timeFromSize(mPvqaBuffer->filled())) / (PVQA_INTERVAL * 1000);
if (frames > 0) if (frames > 0)
{ {
int time4pvqa = (int)(frames * PVQA_INTERVAL * 1000); int time4pvqa = (int)(frames * PVQA_INTERVAL * 1000);
int size4pvqa = (int)fmt.sizeFromTime(time4pvqa); int size4pvqa = (int)fmt.sizeFromTime(time4pvqa);
ICELogInfo(<< "PVQA buffer has " << time4pvqa << " milliseconds of audio."); ICELogDebug(<< "PVQA buffer has " << time4pvqa << " milliseconds of audio.");
mPVQA->update(mPvqaBuffer->data(), size4pvqa); mPVQA->update(mPvqaBuffer->data(), size4pvqa);
mPvqaBuffer->erase(size4pvqa); mPvqaBuffer->erase(size4pvqa);
} }

View File

@ -47,12 +47,17 @@ namespace MT
public: public:
Packet(const std::shared_ptr<RTPPacket>& packet, int timelen, int rate); Packet(const std::shared_ptr<RTPPacket>& packet, int timelen, int rate);
std::shared_ptr<RTPPacket> rtp() const; std::shared_ptr<RTPPacket> rtp() const;
int timelength() const; int timelength() const;
int rate() const; int rate() const;
const std::vector<short>& pcm() const;
std::vector<short>& pcm();
protected: protected:
std::shared_ptr<RTPPacket> mRtp; std::shared_ptr<RTPPacket> mRtp;
int mTimelength = 0, mRate = 0; int mTimelength = 0, mRate = 0;
std::vector<short> mPcm;
}; };
RtpBuffer(Statistics& stat); RtpBuffer(Statistics& stat);
@ -60,35 +65,48 @@ namespace MT
unsigned ssrc(); unsigned ssrc();
void setSsrc(unsigned ssrc); void setSsrc(unsigned ssrc);
void setHigh(int milliseconds); void setHigh(int milliseconds);
int high(); int high();
void setLow(int milliseconds); void setLow(int milliseconds);
int low(); int low();
void setPrebuffer(int milliseconds); void setPrebuffer(int milliseconds);
int prebuffer(); int prebuffer();
int getNumberOfReturnedPackets() const; int getNumberOfReturnedPackets() const;
int getNumberOfAddPackets() const; int getNumberOfAddPackets() const;
int findTimelength(); int findTimelength();
int getCount() const; int getCount() const;
// Returns false if packet was not add - maybe too old or too new or duplicate
bool add(std::shared_ptr<RTPPacket> packet, int timelength, int rate);
typedef std::vector<std::shared_ptr<RTPPacket>> ResultList; // Returns false if packet was not add - maybe too old or too new or duplicate
std::shared_ptr<Packet> add(std::shared_ptr<RTPPacket> packet, int timelength, int rate);
typedef std::vector<std::shared_ptr<Packet>> ResultList;
typedef std::shared_ptr<ResultList> PResultList; typedef std::shared_ptr<ResultList> PResultList;
FetchResult fetch(ResultList& rl); FetchResult fetch(ResultList& rl);
protected: protected:
unsigned mSsrc; unsigned mSsrc = 0;
int mHigh, mLow, mPrebuffer; int mHigh = RTP_BUFFER_HIGH,
int mReturnedCounter, mAddCounter; mLow = RTP_BUFFER_LOW,
mPrebuffer = RTP_BUFFER_PREBUFFER;
int mReturnedCounter = 0,
mAddCounter = 0;
mutable Mutex mGuard; mutable Mutex mGuard;
typedef std::vector<Packet> PacketList; typedef std::vector<std::shared_ptr<Packet>> PacketList;
PacketList mPacketList; PacketList mPacketList;
Statistics& mStat; Statistics& mStat;
bool mFirstPacketWillGo; bool mFirstPacketWillGo = true;
jrtplib::RTPSourceStats mRtpStats; jrtplib::RTPSourceStats mRtpStats;
Packet mFetchedPacket; std::shared_ptr<Packet> mFetchedPacket;
// To calculate average interval between packet add. It is close to jitter but more useful in debugging.
float mLastAddTime = 0.0;
}; };
class Receiver class Receiver
@ -146,6 +164,9 @@ namespace MT
std::shared_ptr<jrtplib::RTPPacket> mCngPacket; std::shared_ptr<jrtplib::RTPPacket> mCngPacket;
CngDecoder mCngDecoder; CngDecoder mCngDecoder;
// Decode RTP early, do not wait for speaker callback
bool mEarlyDecode = false;
// Buffer to hold decoded data // Buffer to hold decoded data
char mDecodedFrame[65536]; char mDecodedFrame[65536];
int mDecodedLength = 0; int mDecodedLength = 0;
@ -161,12 +182,17 @@ namespace MT
// Last packet time length // Last packet time length
int mLastPacketTimeLength = 0; int mLastPacketTimeLength = 0;
int mFailedCount; int mFailedCount = 0;
Audio::Resampler mResampler8, mResampler16, Audio::Resampler mResampler8, mResampler16,
mResampler32, mResampler48; mResampler32, mResampler48;
Audio::PWavFileWriter mDecodedDump; Audio::PWavFileWriter mDecodedDump;
float mLastDecodeTime = 0.0; // Time last call happened to codec->decode()
float mIntervalSum = 0.0;
int mIntervalCount = 0;
// Zero rate will make audio mono but resampling will be skipped // Zero rate will make audio mono but resampling will be skipped
void makeMonoAndResample(int rate, int channels); void makeMonoAndResample(int rate, int channels);

View File

@ -97,7 +97,7 @@ AudioStream::~AudioStream()
if (mFinalStatistics) if (mFinalStatistics)
*mFinalStatistics = mStat; *mFinalStatistics = mStat;
ICELogInfo(<< mStat.toShortString()); ICELogInfo(<< mStat.toString());
} }
void AudioStream::setDestination(const RtpPair<InternetAddress>& dest) void AudioStream::setDestination(const RtpPair<InternetAddress>& dest)

View File

@ -34,7 +34,7 @@ Terminal::~Terminal()
mAudioPair.reset(); mAudioPair.reset();
} }
PStream Terminal::createStream(int type, VariantMap& config) PStream Terminal::createStream(int type, VariantMap& /*config*/)
{ {
PStream result; PStream result;
switch (type) switch (type)
@ -52,7 +52,7 @@ PStream Terminal::createStream(int type, VariantMap& config)
return result; return result;
} }
void Terminal::freeStream(PStream stream) void Terminal::freeStream(const PStream& stream)
{ {
if (AudioStream* audio = dynamic_cast<AudioStream*>(stream.get())) if (AudioStream* audio = dynamic_cast<AudioStream*>(stream.get()))
{ {

View File

@ -26,10 +26,11 @@ namespace MT
CodecList& codeclist(); CodecList& codeclist();
PStream createStream(int type, VariantMap& config); PStream createStream(int type, VariantMap& config);
void freeStream(PStream s); void freeStream(const PStream& s);
Audio::PDevicePair audio(); Audio::PDevicePair audio();
void setAudio(const Audio::PDevicePair& audio); void setAudio(const Audio::PDevicePair& audio);
protected: protected:
StreamList mAudioList; StreamList mAudioList;
std::mutex mAudioListMutex; std::mutex mAudioListMutex;

View File

@ -63,8 +63,13 @@ public:
virtual int channels() { return 1; } virtual int channels() { return 1; }
// Returns size of encoded data (RTP) in bytes
virtual int encode(const void* input, int inputBytes, void* output, int outputCapacity) = 0; virtual int encode(const void* input, int inputBytes, void* output, int outputCapacity) = 0;
// Returns size of decoded data (PCM signed short) in bytes
virtual int decode(const void* input, int inputBytes, void* output, int outputCapacity) = 0; virtual int decode(const void* input, int inputBytes, void* output, int outputCapacity) = 0;
// Returns size of produced data (PCM signed short) in bytes
virtual int plc(int lostFrames, void* output, int outputCapacity) = 0; virtual int plc(int lostFrames, void* output, int outputCapacity) = 0;
// Returns size of codec in memory // Returns size of codec in memory

View File

@ -25,6 +25,42 @@ using namespace MT;
using strx = StringHelper; using strx = StringHelper;
// ---------------- EvsSpec --------------- // ---------------- EvsSpec ---------------
std::string CodecList::Settings::toString() const
{
std::ostringstream oss;
oss << "wrap IuUP: " << mWrapIuUP << std::endl
<< "skip decode: " << mSkipDecode << std::endl;
for (int ptype: mAmrWbPayloadType)
oss << "AMR WB ptype: " << ptype << std::endl;
for (int ptype: mAmrWbOctetPayloadType)
oss << "AMR WB octet-aligned ptype: " << ptype << std::endl;
for (int ptype: mAmrNbPayloadType)
oss << "AMR NB ptype: " << ptype << std::endl;
for (int ptype: mAmrNbOctetPayloadType)
oss << "AMR NB octet-aligned ptype:" << ptype << std::endl;
oss << "ISAC 16Khz ptype: " << mIsac16KPayloadType << std::endl
<< "ISAC 32Khz ptype: " << mIsac32KPayloadType << std::endl
<< "iLBC 20ms ptype: " << mIlbc20PayloadType << std::endl
<< "iLBC 30ms ptype: " << mIlbc30PayloadType << std::endl
<< "GSM FR ptype: " << mGsmFrPayloadType << ", GSM FR plength: " << mGsmFrPayloadLength << std::endl
<< "GSM HR ptype: " << mGsmHrPayloadType << std::endl
<< "GSM EFR ptype: " << mGsmEfrPayloadType << std::endl;
for (auto& spec: mEvsSpec)
{
oss << "EVS ptype: " << spec.mPayloadType << ", bw: " << spec.mBandwidth << ", enc: " << (spec.mEncodingType == EvsSpec::Encoding_MIME ? "mime" : "g192") << std::endl;
}
for (auto& spec: mOpusSpec)
{
oss << "OPUS ptype: " << spec.mPayloadType << ", rate: " << spec.mRate << ", channels: " << spec.mChannels << std::endl;
}
return oss.str();
}
bool CodecList::Settings::EvsSpec::isValid() const bool CodecList::Settings::EvsSpec::isValid() const
{ {
return mPayloadType >= 96 && mPayloadType <= 127; return mPayloadType >= 96 && mPayloadType <= 127;
@ -125,7 +161,6 @@ CodecList::CodecList(const Settings& settings)
mFactoryList.push_back(new GsmEfrCodec::GsmEfrFactory(mSettings.mWrapIuUP, mSettings.mGsmEfrPayloadType)); mFactoryList.push_back(new GsmEfrCodec::GsmEfrFactory(mSettings.mWrapIuUP, mSettings.mGsmEfrPayloadType));
#endif #endif
#endif #endif
mFactoryList.push_back(new IsacCodec::IsacFactory16K(mSettings.mIsac16KPayloadType)); mFactoryList.push_back(new IsacCodec::IsacFactory16K(mSettings.mIsac16KPayloadType));
mFactoryList.push_back(new IlbcCodec::IlbcFactory(mSettings.mIlbc20PayloadType, mSettings.mIlbc30PayloadType)); mFactoryList.push_back(new IlbcCodec::IlbcFactory(mSettings.mIlbc20PayloadType, mSettings.mIlbc30PayloadType));
mFactoryList.push_back(new G711Codec::AlawFactory()); mFactoryList.push_back(new G711Codec::AlawFactory());

View File

@ -82,6 +82,9 @@ public:
}; };
std::vector<OpusSpec> mOpusSpec; std::vector<OpusSpec> mOpusSpec;
// Textual representation - used in logging
std::string toString() const;
static Settings DefaultSettings; static Settings DefaultSettings;
}; };

View File

@ -19,6 +19,7 @@ SingleAudioStream::SingleAudioStream(const CodecList::Settings& codecSettings, S
SingleAudioStream::~SingleAudioStream() SingleAudioStream::~SingleAudioStream()
{ {
} }
void SingleAudioStream::process(const std::shared_ptr<jrtplib::RTPPacket>& packet) void SingleAudioStream::process(const std::shared_ptr<jrtplib::RTPPacket>& packet)

View File

@ -191,6 +191,9 @@ Statistics& Statistics::operator += (const Statistics& src)
mJitter = src.mJitter; mJitter = src.mJitter;
mRttDelay = src.mRttDelay; mRttDelay = src.mRttDelay;
mDecodingInterval = src.mDecodingInterval;
mDecodeRequested = src.mDecodeRequested;
if (!src.mCodecName.empty()) if (!src.mCodecName.empty())
mCodecName = src.mCodecName; mCodecName = src.mCodecName;
@ -239,13 +242,16 @@ Statistics& Statistics::operator -= (const Statistics& src)
} }
std::string Statistics::toShortString() const std::string Statistics::toString() const
{ {
std::ostringstream oss; std::ostringstream oss;
oss << "Received: " << mReceivedRtp oss << "Received: " << mReceivedRtp
<< ", lost: " << mPacketLoss << ", lost: " << mPacketLoss
<< ", dropped: " << mPacketDropped << ", dropped: " << mPacketDropped
<< ", sent: " << mSentRtp; << ", sent: " << mSentRtp
<< ", decoding interval: " << mDecodingInterval.average()
<< ", decode requested: " << mDecodeRequested.average()
<< ", packet interval: " << mPacketInterval.average();
return oss.str(); return oss.str();
} }

View File

@ -6,6 +6,8 @@
#include "audio/Audio_DataWindow.h" #include "audio/Audio_DataWindow.h"
#include "helper/HL_Optional.hpp" #include "helper/HL_Optional.hpp"
#include "helper/HL_Statistics.h"
#include "jrtplib/src/rtptimeutilities.h" #include "jrtplib/src/rtptimeutilities.h"
#include "jrtplib/src/rtppacket.h" #include "jrtplib/src/rtppacket.h"
@ -13,78 +15,6 @@ using std::experimental::optional;
namespace MT namespace MT
{ {
template<typename T>
struct Average
{
int mCount = 0;
T mSum = 0;
T average() const
{
if (!mCount)
return 0;
return mSum / mCount;
}
T value() const
{
return average();
}
void process(T value)
{
mCount++;
mSum += value;
}
};
template<typename T, int minimum = 100000, int maximum = 0, int default_value = 0>
struct TestResult
{
T mMin = minimum;
T mMax = maximum;
Average<T> mAverage;
T mCurrent = default_value;
void process(T value)
{
if (mMin > value)
mMin = value;
if (mMax < value)
mMax = value;
mCurrent = value;
mAverage.process(value);
}
bool is_initialized() const
{
return mAverage.mCount > 0;
}
T current() const
{
if (is_initialized())
return mCurrent;
else
return 0;
}
T value() const
{
return current();
}
TestResult<T>& operator = (T value)
{
process(value);
return *this;
}
operator T()
{
return mCurrent;
}
};
template<typename T> template<typename T>
struct StreamStats struct StreamStats
@ -130,9 +60,13 @@ public:
mDuplicatedRtp, // Number of received duplicated rtp packets mDuplicatedRtp, // Number of received duplicated rtp packets
mOldRtp, // Number of late rtp packets mOldRtp, // Number of late rtp packets
mPacketLoss, // Number of lost packets mPacketLoss, // Number of lost packets
mPacketDropped, // Number of dropped packets (due to time unsync when playing) mPacketDropped, // Number of dropped packets (due to time unsync when playing)б
mIllegalRtp; // Number of rtp packets with bad payload type mIllegalRtp; // Number of rtp packets with bad payload type
TestResult<float> mDecodingInterval, // Average interval on call to packet decode
mDecodeRequested, // Average amount of requested audio frames to play
mPacketInterval; // Average interval between packet adding to jitter buffer
int mLoss[128]; // Every item is number of loss of corresping length int mLoss[128]; // Every item is number of loss of corresping length
size_t mAudioTime; // Decoded/found time in milliseconds size_t mAudioTime; // Decoded/found time in milliseconds
uint16_t mSsrc; // Last known SSRC ID in a RTP stream uint16_t mSsrc; // Last known SSRC ID in a RTP stream
@ -169,7 +103,7 @@ public:
std::string mPvqaReport; std::string mPvqaReport;
#endif #endif
std::string toShortString() const; std::string toString() const;
}; };
} // end of namespace MT } // end of namespace MT

View File

@ -21,7 +21,7 @@ Stream::Stream()
Stream::~Stream() Stream::~Stream()
{ {
ICELogInfo(<< mStat.toString());
} }
void Stream::setDestination(const RtpPair<InternetAddress>& dest) void Stream::setDestination(const RtpPair<InternetAddress>& dest)
@ -77,13 +77,13 @@ StreamList::~StreamList()
clear(); clear();
} }
void StreamList::add(PStream s) void StreamList::add(const PStream& s)
{ {
Lock l(mMutex); Lock l(mMutex);
mStreamVector.push_back(s); mStreamVector.push_back(s);
} }
void StreamList::remove(PStream s) void StreamList::remove(const PStream& s)
{ {
Lock l(mMutex); Lock l(mMutex);
@ -98,7 +98,7 @@ void StreamList::clear()
mStreamVector.clear(); mStreamVector.clear();
} }
bool StreamList::has(PStream s) bool StreamList::has(const PStream& s)
{ {
Lock l(mMutex); Lock l(mMutex);
return std::find(mStreamVector.begin(), mStreamVector.end(), s) != mStreamVector.end(); return std::find(mStreamVector.begin(), mStreamVector.end(), s) != mStreamVector.end();
@ -127,4 +127,4 @@ void StreamList::copyTo(StreamList* sl)
Mutex& StreamList::getMutex() Mutex& StreamList::getMutex()
{ {
return mMutex; return mMutex;
} }

View File

@ -88,10 +88,10 @@ namespace MT
StreamList(); StreamList();
~StreamList(); ~StreamList();
void add(PStream s); void add(const PStream& s);
void remove(PStream s); void remove(const PStream& s);
void clear(); void clear();
bool has(PStream s); bool has(const PStream& s);
int size(); int size();
PStream streamAt(int index); PStream streamAt(int index);

View File

@ -24,7 +24,7 @@
# include <ws2tcpip.h> # include <ws2tcpip.h>
#else #else
# include <netinet/in.h> # include <netinet/in.h>
# if /*defined(TARGET_LINUX) || */ defined(TARGET_ANDROID) # if /*defined(TARGET_LINUX) || */ defined(TARGET_ANDROID)
# include <linux/in6.h> # include <linux/in6.h>
# endif # endif
@ -465,15 +465,15 @@ unsigned char* NetworkAddress::ipBytes() const
#endif #endif
case AF_INET6: case AF_INET6:
#ifdef TARGET_WIN #ifdef TARGET_WIN
return (unsigned char*)mAddr6.sin6_addr.u.Byte; return (unsigned char*)mAddr6.sin6_addr.u.Byte;
#elif defined(TARGET_OSX) || defined(TARGET_IOS) #elif defined(TARGET_OSX) || defined(TARGET_IOS)
return (unsigned char*)&mAddr6.sin6_addr.__u6_addr.__u6_addr8; return (unsigned char*)&mAddr6.sin6_addr.__u6_addr.__u6_addr8;
#elif defined(TARGET_OPENWRT) || defined(TARGET_MUSL) #elif defined(TARGET_OPENWRT) || defined(TARGET_MUSL)
return (unsigned char*)&mAddr6.sin6_addr.__in6_union.__s6_addr; return (unsigned char*)&mAddr6.sin6_addr.__in6_union.__s6_addr;
#elif defined(TARGET_LINUX) #elif defined(TARGET_LINUX)
return (unsigned char*)&mAddr6.sin6_addr.__in6_u.__u6_addr8; return (unsigned char*)&mAddr6.sin6_addr.__in6_u.__u6_addr8;
#elif defined(TARGET_ANDROID) #elif defined(TARGET_ANDROID)
return (unsigned char*)&mAddr6.sin6_addr.in6_u.u6_addr8; return (unsigned char*)&mAddr6.sin6_addr.in6_u.u6_addr8;
#endif #endif
} }
assert(0); assert(0);
@ -783,3 +783,12 @@ bool NetworkAddress::isSame(const NetworkAddress& a1, const NetworkAddress& a2)
} }
return false; return false;
} }
NetworkAddress& NetworkAddress::operator = (const NetworkAddress& src)
{
this->mInitialized = src.mInitialized;
this->mRelayed = src.mRelayed;
this->mAddr6 = src.mAddr6;
return *this;
}

View File

@ -75,7 +75,9 @@ namespace ice
static bool isSameHost(const NetworkAddress& a1, const NetworkAddress& a2); static bool isSameHost(const NetworkAddress& a1, const NetworkAddress& a2);
static bool isSame(const NetworkAddress& a1, const NetworkAddress& a2); static bool isSame(const NetworkAddress& a1, const NetworkAddress& a2);
NetworkAddress& operator = (const NetworkAddress& src);
bool operator == (const NetworkAddress& rhs) const; bool operator == (const NetworkAddress& rhs) const;
bool operator != (const NetworkAddress& rhs) const; bool operator != (const NetworkAddress& rhs) const;
bool operator < (const NetworkAddress& rhs) const; bool operator < (const NetworkAddress& rhs) const;

View File

@ -150,7 +150,7 @@ extern Logger GLogger;
{\ {\
if (GLogger.level() >= level_)\ if (GLogger.level() >= level_)\
{\ {\
LogLock l(GLogger.mutex());\ LogLock log_lock(GLogger.mutex());\
GLogger.beginLine(level_, __FILE__, __LINE__, subsystem_);\ GLogger.beginLine(level_, __FILE__, __LINE__, subsystem_);\
GLogger args_;\ GLogger args_;\
GLogger.endLine();\ GLogger.endLine();\

View File

@ -69,6 +69,8 @@ public:
/** Returns the least significant word. */ /** Returns the least significant word. */
uint32_t GetLSW() const { return lsw; } uint32_t GetLSW() const { return lsw; }
uint64_t Get64() const { return (uint64_t)msw << 32 | lsw; }
private: private:
uint32_t msw,lsw; uint32_t msw,lsw;
}; };

8
src/libs/oboe/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
*/.DS_Store
.DS_Store
.externalNativeBuild/
.cxx/
.idea
build
.logpile

9
src/libs/oboe/AUTHORS Normal file
View File

@ -0,0 +1,9 @@
# This is the official list of authors for copyright purposes.
# This file is distinct from the CONTRIBUTORS files.
# See the latter for an explanation.
# Names should be added to this file as:
# Name or Organization <email address>
# The email address is not required for organizations.
Google Inc.

View File

@ -0,0 +1,98 @@
cmake_minimum_required(VERSION 3.4.1)
# Set the name of the project and store it in PROJECT_NAME. Also set the following variables:
# PROJECT_SOURCE_DIR (usually the root directory where Oboe has been cloned e.g.)
# PROJECT_BINARY_DIR (usually the containing project's binary directory,
# e.g. ${OBOE_HOME}/samples/RhythmGame/.externalNativeBuild/cmake/ndkExtractorDebug/x86/oboe-bin)
project(oboe)
set (oboe_sources
src/aaudio/AAudioLoader.cpp
src/aaudio/AudioStreamAAudio.cpp
src/common/AudioSourceCaller.cpp
src/common/AudioStream.cpp
src/common/AudioStreamBuilder.cpp
src/common/DataConversionFlowGraph.cpp
src/common/FilterAudioStream.cpp
src/common/FixedBlockAdapter.cpp
src/common/FixedBlockReader.cpp
src/common/FixedBlockWriter.cpp
src/common/LatencyTuner.cpp
src/common/SourceFloatCaller.cpp
src/common/SourceI16Caller.cpp
src/common/SourceI24Caller.cpp
src/common/SourceI32Caller.cpp
src/common/Utilities.cpp
src/common/QuirksManager.cpp
src/fifo/FifoBuffer.cpp
src/fifo/FifoController.cpp
src/fifo/FifoControllerBase.cpp
src/fifo/FifoControllerIndirect.cpp
src/flowgraph/FlowGraphNode.cpp
src/flowgraph/ChannelCountConverter.cpp
src/flowgraph/ClipToRange.cpp
src/flowgraph/ManyToMultiConverter.cpp
src/flowgraph/MonoToMultiConverter.cpp
src/flowgraph/MultiToMonoConverter.cpp
src/flowgraph/RampLinear.cpp
src/flowgraph/SampleRateConverter.cpp
src/flowgraph/SinkFloat.cpp
src/flowgraph/SinkI16.cpp
src/flowgraph/SinkI24.cpp
src/flowgraph/SinkI32.cpp
src/flowgraph/SourceFloat.cpp
src/flowgraph/SourceI16.cpp
src/flowgraph/SourceI24.cpp
src/flowgraph/SourceI32.cpp
src/flowgraph/resampler/IntegerRatio.cpp
src/flowgraph/resampler/LinearResampler.cpp
src/flowgraph/resampler/MultiChannelResampler.cpp
src/flowgraph/resampler/PolyphaseResampler.cpp
src/flowgraph/resampler/PolyphaseResamplerMono.cpp
src/flowgraph/resampler/PolyphaseResamplerStereo.cpp
src/flowgraph/resampler/SincResampler.cpp
src/flowgraph/resampler/SincResamplerStereo.cpp
src/opensles/AudioInputStreamOpenSLES.cpp
src/opensles/AudioOutputStreamOpenSLES.cpp
src/opensles/AudioStreamBuffered.cpp
src/opensles/AudioStreamOpenSLES.cpp
src/opensles/EngineOpenSLES.cpp
src/opensles/OpenSLESUtilities.cpp
src/opensles/OutputMixerOpenSLES.cpp
src/common/StabilizedCallback.cpp
src/common/Trace.cpp
src/common/Version.cpp
)
add_library(oboe ${oboe_sources})
# Specify directories which the compiler should look for headers
target_include_directories(oboe
PRIVATE src
PUBLIC include)
# Compile Flags:
# Enable -Werror when building debug config
# Enable -Ofast
target_compile_options(oboe
PRIVATE
-std=c++17
-Wall
-Wextra-semi
-Wshadow
-Wshadow-field
-Ofast
"$<$<CONFIG:DEBUG>:-Werror>")
# Enable logging of D,V for debug builds
target_compile_definitions(oboe PUBLIC $<$<CONFIG:DEBUG>:OBOE_ENABLE_LOGGING=1>)
target_link_libraries(oboe PRIVATE log OpenSLES)
# When installing oboe put the libraries in the lib/<ABI> folder e.g. lib/arm64-v8a
install(TARGETS oboe
LIBRARY DESTINATION lib/${ANDROID_ABI}
ARCHIVE DESTINATION lib/${ANDROID_ABI})
# Also install the headers
install(DIRECTORY include/oboe DESTINATION include)

View File

@ -0,0 +1 @@
Please see the CONTRIBUTING.md file for more information.

View File

@ -0,0 +1,25 @@
Want to contribute? Great! First, read this page (including the small print at the end).
### Before you contribute
Before we can use your code, you must sign the
[Google Individual Contributor License
Agreement](https://developers.google.com/open-source/cla/individual?csw=1)
(CLA), which you can do online. The CLA is necessary mainly because you own the
copyright to your changes, even after your contribution becomes part of our
codebase, so we need your permission to use and distribute your code. We also
need to be sure of various other things—for instance that you'll tell us if you
know that your code infringes on other people's patents. You don't have to sign
the CLA until after you've submitted your code for review and a member has
approved it, but you must do it before we can put your code into our codebase.
Before you start working on a larger contribution, you should get in touch with
us first through the issue tracker with your idea so that we can help out and
possibly guide you. Coordinating up front makes it much easier to avoid
frustration later on.
### Code reviews
All submissions, including submissions by project members, require review. We
use Github pull requests for this purpose.
### The small print
Contributions made by corporations are covered by a different agreement than
the one above, the Software Grant and Corporate Contributor License Agreement.

View File

@ -0,0 +1,14 @@
# People who have agreed to one of the CLAs and can contribute patches.
# The AUTHORS file lists the copyright holders; this file
# lists people. For example, Google employees are listed here
# but not in AUTHORS, because Google holds the copyright.
#
# https://developers.google.com/open-source/cla/individual
# https://developers.google.com/open-source/cla/corporate
#
# Names should be added to this file as:
# Name <email address>
Phil Burk <philburk@google.com>
Don Turner <donturner@google.com>
Mikhail Naganov <mnaganov@google.com>

2482
src/libs/oboe/Doxyfile Normal file

File diff suppressed because it is too large Load Diff

202
src/libs/oboe/LICENSE Normal file
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

202
src/libs/oboe/NOTICE Normal file
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

1
src/libs/oboe/README Normal file
View File

@ -0,0 +1 @@
Please see the README.md file for more information.

54
src/libs/oboe/README.md Normal file
View File

@ -0,0 +1,54 @@
# Oboe [![Build CI](https://github.com/google/oboe/workflows/Build%20CI/badge.svg)](https://github.com/google/oboe/actions)
[![Introduction to Oboe video](docs/images/getting-started-video.jpg)](https://www.youtube.com/watch?v=csfHAbr5ilI&list=PLWz5rJ2EKKc_duWv9IPNvx9YBudNMmLSa)
Oboe is a C++ library which makes it easy to build high-performance audio apps on Android. It was created primarily to allow developers to target a simplified API that works across multiple API levels back to API level 16 (Jelly Bean).
## Features
- Compatible with API 16 onwards - runs on 99% of Android devices
- Chooses the audio API (OpenSL ES on API 16+ or AAudio on API 27+) which will give the best audio performance on the target Android device
- Automatic latency tuning
- Modern C++ allowing you to write clean, elegant code
- Workarounds for some known issues
- [Used by popular apps and frameworks](https://github.com/google/oboe/wiki/AppsUsingOboe)
## Documentation
- [Getting Started Guide](docs/GettingStarted.md)
- [Full Guide to Oboe](docs/FullGuide.md)
- [API reference](https://google.github.io/oboe/reference)
- [Tech Notes](docs/notes/)
- [History of Audio features/bugs by Android version](docs/AndroidAudioHistory.md)
- [Migration guide for apps using OpenSL ES](docs/OpenSLESMigration.md)
- [Frequently Asked Questions](docs/FAQ.md) (FAQ)
- [Our roadmap](https://github.com/google/oboe/milestones) - Vote on a feature/issue by adding a thumbs up to the first comment.
### Community
- Reddit: [r/androidaudiodev](https://www.reddit.com/r/androidaudiodev/)
- StackOverflow: [#oboe](https://stackoverflow.com/questions/tagged/oboe)
## Testing
- [**OboeTester** app for measuring latency, glitches, etc.](https://github.com/google/oboe/tree/master/apps/OboeTester/docs)
- [Oboe unit tests](https://github.com/google/oboe/tree/master/tests)
## Videos
- [Getting started with Oboe](https://www.youtube.com/playlist?list=PLWz5rJ2EKKc_duWv9IPNvx9YBudNMmLSa)
- [Low Latency Audio - Because Your Ears Are Worth It](https://www.youtube.com/watch?v=8vOf_fDtur4) (Android Dev Summit '18)
- [Winning on Android](https://www.youtube.com/watch?v=tWBojmBpS74) - How to optimize an Android audio app. (ADC '18)
## Sample code and apps
- Sample apps can be found in the [samples directory](samples).
- A complete "effects processor" app called FXLab can be found in the [apps/fxlab folder](apps/fxlab).
- Also check out the [Rhythm Game codelab](https://developer.android.com/codelabs/musicalgame-using-oboe?hl=en#0).
### Third party sample code
- [Ableton Link integration demo](https://github.com/jbloit/AndroidLinkAudio) (author: jbloit)
## Contributing
We would love to receive your pull requests. Before we can though, please read the [contributing](CONTRIBUTING.md) guidelines.
## Version history
View the [releases page](../../releases).
## License
[LICENSE](LICENSE)

View File

@ -0,0 +1,13 @@
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build/
.idea/
/app/build/
/app/release/
/app/debug/
/app/app.iml
*.iml
/app/externalNativeBuild/

View File

@ -0,0 +1,7 @@
status: PUBLISHED
technologies: [Android, NDK]
categories: [NDK, C++]
languages: [C++, Java]
solutions: [Mobile]
github: googlesamples/android-ndk
license: apache2

View File

@ -0,0 +1,6 @@
# Oboe Tester
OboeTester is an app that can be used to test many of the features of Oboe, AAudio and OpenSL ES.
It can also be used to measure device latency and glitches.
# [OboeTester Documentation](docs)

View File

@ -0,0 +1,34 @@
cmake_minimum_required(VERSION 3.4.1)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror -Wall -std=c++14 -DOBOE_NO_INCLUDE_AAUDIO -fvisibility=hidden")
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O2")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3")
link_directories(${CMAKE_CURRENT_LIST_DIR}/..)
file(GLOB_RECURSE app_native_sources src/main/cpp/*)
### Name must match loadLibrary() call in MainActivity.java
add_library(oboetester SHARED ${app_native_sources})
### INCLUDE OBOE LIBRARY ###
# Set the path to the Oboe library directory
set (OBOE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../..)
# Add the Oboe library as a subproject. Since Oboe is an out-of-tree source library we must also
# specify a binary directory
add_subdirectory(${OBOE_DIR} ./oboe-bin)
# Specify the path to the Oboe header files and the source.
include_directories(
${OBOE_DIR}/include
${OBOE_DIR}/src
)
### END OBOE INCLUDE SECTION ###
# link to oboe
target_link_libraries(oboetester log oboe atomic)
# bump 4 to resync CMake

View File

@ -0,0 +1,44 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion 29
defaultConfig {
applicationId = "com.mobileer.oboetester"
minSdkVersion 23
targetSdkVersion 29
// Also update the versions in the AndroidManifest.xml file.
versionCode 52
versionName "2.1.2"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
cppFlags "-std=c++14"
abiFilters "x86", "x86_64", "armeabi-v7a", "arm64-v8a"
}
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
debug {
jniDebuggable true
}
}
externalNativeBuild {
cmake {
path "CMakeLists.txt"
}
}
}
dependencies {
implementation fileTree(include: ['*.jar'], dir: 'libs')
implementation 'com.android.support.constraint:constraint-layout:2.0.4'
testImplementation 'junit:junit:4.13-beta-3'
implementation 'com.android.support:appcompat-v7:28.0.0'
androidTestImplementation 'com.android.support.test:runner:1.0.2'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
}

Binary file not shown.

View File

@ -0,0 +1,6 @@
#Thu Apr 11 16:29:30 PDT 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.1-all.zip

View File

@ -0,0 +1,172 @@
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"

View File

@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -0,0 +1,9 @@
## This file must *NOT* be checked into Version Control Systems,
# as it contains information specific to your local configuration.
#
# Location of the SDK. This is only used by Gradle.
# For customization when using a Version Control System, please read the
# header note.
#Thu Apr 11 16:29:25 PDT 2019
ndk.dir=/Users/philburk/Library/Android/sdk/ndk-bundle
sdk.dir=/Users/philburk/Library/Android/sdk

View File

@ -0,0 +1,17 @@
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in /Users/gfan/dev/android-sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}

View File

@ -0,0 +1,127 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.mobileer.oboetester"
android:versionCode="52"
android:versionName="2.1.2">
<!-- versionCode and versionName also have to be updated in build.gradle -->
<uses-feature
android:name="android.hardware.microphone"
android:required="false" />
<uses-feature
android:name="android.hardware.audio.output"
android:required="true" />
<uses-feature
android:name="android.hardware.touchscreen"
android:required="false" />
<uses-feature
android:name="android.software.midi"
android:required="false" />
<uses-feature
android:name="android.software.leanback"
android:required="false" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<uses-permission android:name="android.permission.INTERNET" />
<!-- This is needed for sharing test results. -->
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<application
android:allowBackup="false"
android:fullBackupContent="false"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity
android:name=".MainActivity"
android:label="@string/app_name"
android:launchMode="singleTask"
android:screenOrientation="portrait">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
<category android:name="android.intent.category.LEANBACK_LAUNCHER" />
</intent-filter>
</activity>
<activity
android:name=".TestOutputActivity"
android:label="@string/title_activity_test_output"
android:screenOrientation="portrait" />
<activity
android:name=".TestInputActivity"
android:label="@string/title_activity_test_input"
android:screenOrientation="portrait" />
<activity
android:name=".TapToToneActivity"
android:label="@string/title_activity_output_latency"
android:screenOrientation="portrait" />
<activity
android:name=".RecorderActivity"
android:label="@string/title_activity_recorder"
android:screenOrientation="portrait" />
<activity
android:name=".EchoActivity"
android:label="@string/title_activity_echo"
android:screenOrientation="portrait" />
<activity
android:name=".RoundTripLatencyActivity"
android:label="@string/title_activity_rt_latency"
android:screenOrientation="portrait" />
<activity
android:name=".ManualGlitchActivity"
android:label="@string/title_activity_glitches"
android:screenOrientation="portrait" />
<activity
android:name=".AutomatedGlitchActivity"
android:label="@string/title_activity_auto_glitches"
android:screenOrientation="portrait" />
<activity
android:name=".TestDisconnectActivity"
android:label="@string/title_test_disconnect"
android:screenOrientation="portrait" />
<activity
android:name=".DeviceReportActivity"
android:label="@string/title_report_devices"
android:screenOrientation="portrait" />
<activity
android:name=".TestDataPathsActivity"
android:label="@string/title_data_paths"
android:screenOrientation="portrait" />
<activity
android:name=".ExtraTestsActivity"
android:exported="true"
android:label="@string/title_extra_tests" />
<activity
android:name=".ExternalTapToToneActivity"
android:label="@string/title_external_tap"
android:exported="true" />
<service
android:name=".MidiTapTester"
android:permission="android.permission.BIND_MIDI_DEVICE_SERVICE">
<intent-filter>
<action android:name="android.media.midi.MidiDeviceService" />
</intent-filter>
<meta-data
android:name="android.media.midi.MidiDeviceService"
android:resource="@xml/service_device_info" />
</service>
<provider
android:name="android.support.v4.content.FileProvider"
android:authorities="${applicationId}.provider"
android:exported="false"
android:grantUriPermissions="true">
<meta-data
android:name="android.support.FILE_PROVIDER_PATHS"
android:resource="@xml/provider_paths" />
</provider>
</application>
</manifest>

View File

@ -0,0 +1,39 @@
/*
* Copyright 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cstring>
#include <sched.h>
#include "common/OboeDebug.h"
#include "oboe/Oboe.h"
#include "AudioStreamGateway.h"
using namespace oboe::flowgraph;
oboe::DataCallbackResult AudioStreamGateway::onAudioReady(
        oboe::AudioStream *audioStream,
        void *audioData,
        int numFrames) {
    printScheduler();
    // Pull numFrames of audio from the attached flowgraph sink, if any.
    // With no sink the buffer is left as-is for this callback.
    if (mAudioSink) {
        mAudioSink->read(audioData, numFrames);
    }
    // Keep the stream running.
    return oboe::DataCallbackResult::Continue;
}

View File

@ -0,0 +1,55 @@
/*
* Copyright 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_AUDIOGRAPHRUNNER_H
#define NATIVEOBOE_AUDIOGRAPHRUNNER_H
#include <unistd.h>
#include <sys/types.h>
#include "flowgraph/FlowGraphNode.h"
#include "oboe/Oboe.h"
#include "OboeTesterStreamCallback.h"
using namespace oboe::flowgraph;
/**
* Bridge between an audio flowgraph and an audio device.
* Pass in an AudioSink and then pass
* this object to the AudioStreamBuilder as a callback.
*/
class AudioStreamGateway : public OboeTesterStreamCallback {
public:
    virtual ~AudioStreamGateway() = default;

    /**
     * Attach the flowgraph sink that will supply audio to the stream.
     * Ownership is shared, so the sink may safely outlive the caller.
     */
    void setAudioSink(std::shared_ptr<oboe::flowgraph::FlowGraphSink> sink) {
        mAudioSink = sink;
    }

    /**
     * Called by Oboe when the stream is ready to process audio.
     */
    oboe::DataCallbackResult onAudioReady(
            oboe::AudioStream *audioStream,
            void *audioData,
            int numFrames) override;

private:
    // Audio source; may be null, in which case onAudioReady() leaves the
    // callback buffer untouched.
    std::shared_ptr<oboe::flowgraph::FlowGraphSink> mAudioSink;
};
#endif //NATIVEOBOE_AUDIOGRAPHRUNNER_H

View File

@ -0,0 +1,85 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "FormatConverterBox.h"
FormatConverterBox::FormatConverterBox(int32_t numSamples,
                                       oboe::AudioFormat inputFormat,
                                       oboe::AudioFormat outputFormat) {
    mInputFormat = inputFormat;
    mOutputFormat = outputFormat;
    // Size both scratch buffers for the widest supported sample type
    // (int32_t) so any format fits.
    const size_t bufferBytes = numSamples * sizeof(int32_t);
    mInputBuffer = std::make_unique<uint8_t[]>(bufferBytes);
    mOutputBuffer = std::make_unique<uint8_t[]>(bufferBytes);

    // Build a mono flowgraph source that can read the input format.
    // Float also serves Invalid/Unspecified.
    mSource.reset();
    if (mInputFormat == oboe::AudioFormat::I16) {
        mSource = std::make_unique<oboe::flowgraph::SourceI16>(1);
    } else if (mInputFormat == oboe::AudioFormat::I24) {
        mSource = std::make_unique<oboe::flowgraph::SourceI24>(1);
    } else if (mInputFormat == oboe::AudioFormat::I32) {
        mSource = std::make_unique<oboe::flowgraph::SourceI32>(1);
    } else {
        mSource = std::make_unique<oboe::flowgraph::SourceFloat>(1);
    }

    // Build the matching mono sink for the output format.
    mSink.reset();
    if (mOutputFormat == oboe::AudioFormat::I16) {
        mSink = std::make_unique<oboe::flowgraph::SinkI16>(1);
    } else if (mOutputFormat == oboe::AudioFormat::I24) {
        mSink = std::make_unique<oboe::flowgraph::SinkI24>(1);
    } else if (mOutputFormat == oboe::AudioFormat::I32) {
        mSink = std::make_unique<oboe::flowgraph::SinkI32>(1);
    } else {
        mSink = std::make_unique<oboe::flowgraph::SinkFloat>(1);
    }

    // Wire source -> sink once so convert() can just push data through.
    if (mSource && mSink) {
        mSource->output.connect(&mSink->input);
        mSink->pullReset();
    }
}
// Convert from the internal input buffer into the internal output buffer.
int32_t FormatConverterBox::convertInternalBuffers(int32_t numSamples) {
    void *destination = getOutputBuffer();
    const void *source = getInputBuffer();
    return convert(destination, numSamples, source);
}
// Convert from an external buffer into the internal output buffer.
int32_t FormatConverterBox::convertToInternalOutput(int32_t numSamples, const void *inputBuffer) {
    void *destination = getOutputBuffer();
    return convert(destination, numSamples, inputBuffer);
}
// Convert from the internal input buffer into an external buffer.
int32_t FormatConverterBox::convertFromInternalInput(void *outputBuffer, int32_t numSamples) {
    const void *source = getInputBuffer();
    return convert(outputBuffer, numSamples, source);
}
// Core conversion: feed raw samples to the flowgraph source, then pull
// converted samples out of the sink.
int32_t FormatConverterBox::convert(void *outputBuffer, int32_t numSamples, const void *inputBuffer) {
    mSource->setData(inputBuffer, numSamples);
    const int32_t samplesConverted = mSink->read(outputBuffer, numSamples);
    return samplesConverted;
}

View File

@ -0,0 +1,101 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_FORMAT_CONVERTER_BOX_H
#define OBOETESTER_FORMAT_CONVERTER_BOX_H
#include <unistd.h>
#include <sys/types.h>
#include "oboe/Oboe.h"
#include "flowgraph/SinkFloat.h"
#include "flowgraph/SinkI16.h"
#include "flowgraph/SinkI24.h"
#include "flowgraph/SinkI32.h"
#include "flowgraph/SourceFloat.h"
#include "flowgraph/SourceI16.h"
#include "flowgraph/SourceI24.h"
#include "flowgraph/SourceI32.h"
/**
* Use flowgraph modules to convert between the various data formats.
*
* Note that this does not do channel conversions.
*/
class FormatConverterBox {
public:
    /**
     * @param numSamples capacity of each internal buffer, in samples; each
     *        buffer is allocated to hold the widest sample type (int32_t)
     * @param inputFormat format of the data to be converted
     * @param outputFormat format the data is converted to
     */
    FormatConverterBox(int32_t numSamples,
                       oboe::AudioFormat inputFormat,
                       oboe::AudioFormat outputFormat);

    /**
     * @return internal buffer used to store output data
     */
    void *getOutputBuffer() {
        return (void *) mOutputBuffer.get();
    };

    /**
     * @return internal buffer used to store input data
     */
    void *getInputBuffer() {
        return (void *) mInputBuffer.get();
    };

    /** Convert the data from inputFormat to outputFormat
     * using both internal buffers.
     */
    int32_t convertInternalBuffers(int32_t numSamples);

    /**
     * Convert data from external buffer into internal output buffer.
     * @param numSamples number of samples to convert
     * @param inputBuffer source data in inputFormat
     * @return number of samples converted
     */
    int32_t convertToInternalOutput(int32_t numSamples, const void *inputBuffer);

    /**
     * Convert data from internal input buffer into external output buffer.
     * @param outputBuffer destination for data in outputFormat
     * @param numSamples number of samples to convert
     * @return number of samples converted
     */
    int32_t convertFromInternalInput(void *outputBuffer, int32_t numSamples);

    /**
     * Convert data formats between the specified external buffers.
     * @param outputBuffer destination in outputFormat
     * @param numSamples number of samples to convert
     * @param inputBuffer source in inputFormat
     * @return number of samples converted
     */
    int32_t convert(void *outputBuffer, int32_t numSamples, const void *inputBuffer);

private:
    oboe::AudioFormat mInputFormat{oboe::AudioFormat::Invalid};
    oboe::AudioFormat mOutputFormat{oboe::AudioFormat::Invalid};
    // Scratch buffers allocated in the constructor; exposed via
    // getInputBuffer()/getOutputBuffer().
    std::unique_ptr<uint8_t[]> mInputBuffer;
    std::unique_ptr<uint8_t[]> mOutputBuffer;
    // Mono flowgraph source -> sink pair that performs the conversion.
    std::unique_ptr<oboe::flowgraph::FlowGraphSourceBuffered> mSource;
    std::unique_ptr<oboe::flowgraph::FlowGraphSink> mSink;
};
#endif //OBOETESTER_FORMAT_CONVERTER_BOX_H

View File

@ -0,0 +1,66 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include "FullDuplexAnalyzer.h"
oboe::Result FullDuplexAnalyzer::start() {
    // Configure the analyzer with the stream's actual rate before audio flows.
    LoopbackProcessor *processor = getLoopbackProcessor();
    processor->setSampleRate(getOutputStream()->getSampleRate());
    processor->prepareToTest();
    return FullDuplexStream::start();
}
oboe::DataCallbackResult FullDuplexAnalyzer::onBothStreamsReady(
        const float *inputData,
        int numInputFrames,
        float *outputData,
        int numOutputFrames) {
    // Strides step one interleaved frame; only channel 0 is recorded below.
    int32_t inputStride = getInputStream()->getChannelCount();
    int32_t outputStride = getOutputStream()->getChannelCount();
    const float *inputFloat = inputData;
    float *outputFloat = outputData;
    // Run the loopback analyzer over both buffers (result intentionally ignored).
    (void) getLoopbackProcessor()->process(inputFloat, inputStride, numInputFrames,
                                           outputFloat, outputStride, numOutputFrames);
    // write the first channel of output and input to the stereo recorder
    if (mRecording != nullptr) {
        float buffer[2]; // one stereo frame: [0] = output sample, [1] = input sample
        int numBoth = std::min(numInputFrames, numOutputFrames);
        for (int i = 0; i < numBoth; i++) {
            buffer[0] = *outputFloat;
            outputFloat += outputStride; // advance one frame, reading channel 0 only
            buffer[1] = *inputFloat;
            inputFloat += inputStride;
            mRecording->write(buffer, 1);
        }
        // Handle mismatch in numFrames: pad the shorter side with silence.
        buffer[0] = 0.0f; // gap in output
        for (int i = numBoth; i < numInputFrames; i++) {
            buffer[1] = *inputFloat;
            inputFloat += inputStride;
            mRecording->write(buffer, 1);
        }
        buffer[1] = 0.0f; // gap in input
        for (int i = numBoth; i < numOutputFrames; i++) {
            buffer[0] = *outputFloat;
            outputFloat += outputStride;
            mRecording->write(buffer, 1);
        }
    }
    return oboe::DataCallbackResult::Continue;
};

View File

@ -0,0 +1,64 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_FULL_DUPLEX_ANALYZER_H
#define OBOETESTER_FULL_DUPLEX_ANALYZER_H
#include <unistd.h>
#include <sys/types.h>
#include "oboe/Oboe.h"
#include "FullDuplexStream.h"
#include "analyzer/LatencyAnalyzer.h"
#include "MultiChannelRecording.h"
class FullDuplexAnalyzer : public FullDuplexStream {
public:
    /**
     * @param processor loopback analyzer fed by onBothStreamsReady();
     *        not owned — must outlive this object
     */
    FullDuplexAnalyzer(LoopbackProcessor *processor)
    : mLoopbackProcessor(processor) {
        // Keep one burst of input buffered between the reader and writer.
        setMNumInputBurstsCushion(1);
    }

    /**
     * Called when data is available on both streams.
     * Caller should override this method.
     */
    oboe::DataCallbackResult onBothStreamsReady(
            const float *inputData,
            int numInputFrames,
            float *outputData,
            int numOutputFrames
    ) override;

    oboe::Result start() override;

    LoopbackProcessor *getLoopbackProcessor() {
        return mLoopbackProcessor;
    }

    /**
     * Optional stereo recording of output (channel 0) and input (channel 1).
     * Pass nullptr to disable. Not owned; must outlive use of the callback.
     */
    void setRecording(MultiChannelRecording *recording) {
        mRecording = recording;
    }

private:
    MultiChannelRecording *mRecording = nullptr;
    LoopbackProcessor * const mLoopbackProcessor;
};
#endif //OBOETESTER_FULL_DUPLEX_ANALYZER_H

View File

@ -0,0 +1,46 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include "FullDuplexEcho.h"
oboe::Result FullDuplexEcho::start() {
    // Allocate a delay line long enough for the maximum supported delay
    // at the stream's actual sample rate.
    const int32_t maxDelayFrames =
            static_cast<int32_t>(kMaxDelayTimeSeconds * getOutputStream()->getSampleRate());
    mDelayLine = std::make_unique<InterpolatingDelayLine>(maxDelayFrames);
    return FullDuplexStream::start();
}
/**
 * Echo the first input channel to the first output channel through the
 * interpolating delay line. Any extra output channels are left silent.
 *
 * Fix: do not cast away const from inputData — it is read-only; use a
 * const pointer and drop the C-style casts.
 */
oboe::DataCallbackResult FullDuplexEcho::onBothStreamsReady(
        const float *inputData,
        int numInputFrames,
        float *outputData,
        int numOutputFrames) {
    int32_t framesToEcho = std::min(numInputFrames, numOutputFrames);
    const float *inputFloat = inputData;   // read-only walk over input
    float *outputFloat = outputData;
    // Zero the entire output buffer first so unfilled frames/channels are silent.
    memset(outputFloat, 0, numOutputFrames * getOutputStream()->getBytesPerFrame());
    int32_t inputStride = getInputStream()->getChannelCount();
    int32_t outputStride = getOutputStream()->getChannelCount();
    // Delay expressed in (possibly fractional) frames for interpolation.
    float delayFrames = mDelayTimeSeconds * getOutputStream()->getSampleRate();
    while (framesToEcho-- > 0) {
        *outputFloat = mDelayLine->process(delayFrames, *inputFloat); // mono delay
        inputFloat += inputStride;
        outputFloat += outputStride;
    }
    return oboe::DataCallbackResult::Continue;
}

View File

@ -0,0 +1,57 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_FULL_DUPLEX_ECHO_H
#define OBOETESTER_FULL_DUPLEX_ECHO_H
#include <unistd.h>
#include <sys/types.h>
#include "oboe/Oboe.h"
#include "FullDuplexStream.h"
#include "InterpolatingDelayLine.h"
class FullDuplexEcho : public FullDuplexStream {
public:
    FullDuplexEcho() {
        // Run with zero input-burst cushion (see FullDuplexStream).
        setMNumInputBurstsCushion(0);
    }

    /**
     * Called when data is available on both streams.
     * Caller should override this method.
     */
    oboe::DataCallbackResult onBothStreamsReady(
            const float *inputData,
            int numInputFrames,
            float *outputData,
            int numOutputFrames
    ) override;

    oboe::Result start() override;

    /**
     * Set the echo delay in seconds. Not clamped here; values above
     * kMaxDelayTimeSeconds exceed the delay line allocated in start() —
    * TODO confirm InterpolatingDelayLine clamps internally.
     */
    void setDelayTime(double delayTimeSeconds) {
        mDelayTimeSeconds = delayTimeSeconds;
    }

private:
    // Allocated in start() for kMaxDelayTimeSeconds at the stream rate.
    std::unique_ptr<InterpolatingDelayLine> mDelayLine;
    static constexpr double kMaxDelayTimeSeconds = 4.0;
    double mDelayTimeSeconds = kMaxDelayTimeSeconds;
};
#endif //OBOETESTER_FULL_DUPLEX_ECHO_H

View File

@ -0,0 +1,152 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include "FullDuplexStream.h"
// Non-blocking read of up to numFrames from the input stream into the input
// converter's raw buffer, then convert whatever arrived to float.
oboe::ResultWithValue<int32_t> FullDuplexStream::readInput(int32_t numFrames) {
    auto result = getInputStream()->read(mInputConverter->getInputBuffer(),
                                         numFrames,
                                         0 /* timeout */);
    if (result == oboe::Result::OK) {
        const int32_t samplesRead = result.value() * getInputStream()->getChannelCount();
        mInputConverter->convertInternalBuffers(samplesRead);
    }
    return result;
}
/**
 * Output-stream data callback that drives both streams.
 *
 * Runs through four phases across successive callbacks:
 *   1. drain   - flush stale input so read/write pointers start close;
 *   2. cushion - skip reads for a few callbacks to build an input cushion;
 *   3. discard - read but ignore input while the streams reach equilibrium;
 *   4. steady  - read input, hand both buffers to onBothStreamsReady().
 * The output buffer is always zeroed first, so early phases emit silence.
 */
oboe::DataCallbackResult FullDuplexStream::onAudioReady(
        oboe::AudioStream *outputStream,
        void *audioData,
        int numFrames) {
    oboe::DataCallbackResult callbackResult = oboe::DataCallbackResult::Continue;
    int32_t actualFramesRead = 0;
    // Silence the output.
    int32_t numBytes = numFrames * outputStream->getBytesPerFrame();
    memset(audioData, 0 /* value */, numBytes);
    if (mCountCallbacksToDrain > 0) {
        // Phase 1: drain the input completely.
        int32_t totalFramesRead = 0;
        do {
            oboe::ResultWithValue<int32_t> result = readInput(numFrames);
            if (!result) {
                // Ignore errors because input stream may not be started yet.
                break;
            }
            actualFramesRead = result.value();
            totalFramesRead += actualFramesRead;
        } while (actualFramesRead > 0);
        // Only counts if we actually got some data.
        if (totalFramesRead > 0) {
            mCountCallbacksToDrain--;
        }
    } else if (mCountInputBurstsCushion > 0) {
        // Phase 2: let the input fill up a bit so we are not so close to the write pointer.
        mCountInputBurstsCushion--;
    } else if (mCountCallbacksToDiscard > 0) {
        // Phase 3: read but discard. Allow the input to reach equilibrium with the output.
        mCountCallbacksToDiscard--;
        oboe::ResultWithValue<int32_t> resultAvailable = getInputStream()->getAvailableFrames();
        if (!resultAvailable) {
            LOGE("%s() getAvailableFrames() returned %s\n",
                 __func__, convertToText(resultAvailable.error()));
            callbackResult = oboe::DataCallbackResult::Stop;
        } else {
            int32_t framesAvailable = resultAvailable.value();
            // Only read once enough frames have accumulated.
            if (framesAvailable >= mMinimumFramesBeforeRead) {
                oboe::ResultWithValue<int32_t> resultRead = readInput(numFrames);
                if (!resultRead) {
                    LOGE("%s() read() returned %s\n", __func__, convertToText(resultRead.error()));
                    callbackResult = oboe::DataCallbackResult::Stop;
                }
            }
        }
    } else {
        // Phase 4: steady state — read input and process both streams.
        int32_t framesRead = 0;
        oboe::ResultWithValue<int32_t> resultAvailable = getInputStream()->getAvailableFrames();
        if (!resultAvailable) {
            LOGE("%s() getAvailableFrames() returned %s\n", __func__, convertToText(resultAvailable.error()));
            callbackResult = oboe::DataCallbackResult::Stop;
        } else {
            int32_t framesAvailable = resultAvailable.value();
            if (framesAvailable >= mMinimumFramesBeforeRead) {
                // Read data into input buffer.
                oboe::ResultWithValue<int32_t> resultRead = readInput(numFrames);
                if (!resultRead) {
                    LOGE("%s() read() returned %s\n", __func__, convertToText(resultRead.error()));
                    callbackResult = oboe::DataCallbackResult::Stop;
                } else {
                    framesRead = resultRead.value();
                }
            }
        }
        if (callbackResult == oboe::DataCallbackResult::Continue) {
            // Hand float buffers to the subclass, then convert its float
            // output back to the stream's native format.
            callbackResult = onBothStreamsReady(
                    (const float *) mInputConverter->getOutputBuffer(),
                    framesRead,
                    (float *) mOutputConverter->getInputBuffer(), numFrames);
            mOutputConverter->convertFromInternalInput( audioData,
                                                        numFrames * getOutputStream()->getChannelCount());
        }
    }
    if (callbackResult == oboe::DataCallbackResult::Stop) {
        getInputStream()->requestStop();
    }
    return callbackResult;
}
/**
 * Reset the phase counters, allocate format converters, and start the input
 * stream before the output stream so data is flowing when callbacks begin.
 *
 * Fix: size the conversion buffers with the larger of the two channel counts.
 * The original used only the output channel count, so an input stream with
 * more channels than the output could overflow the input converter's buffer.
 */
oboe::Result FullDuplexStream::start() {
    mCountCallbacksToDrain = kNumCallbacksToDrain;
    mCountInputBurstsCushion = mNumInputBurstsCushion;
    mCountCallbacksToDiscard = kNumCallbacksToDiscard;

    // Determine maximum size that could possibly be called.
    int32_t inputChannels = getInputStream()->getChannelCount();
    int32_t outputChannels = getOutputStream()->getChannelCount();
    int32_t maxChannels = (inputChannels > outputChannels) ? inputChannels : outputChannels;
    int32_t bufferSize = getOutputStream()->getBufferCapacityInFrames() * maxChannels;

    mInputConverter = std::make_unique<FormatConverterBox>(bufferSize,
                                                           getInputStream()->getFormat(),
                                                           oboe::AudioFormat::Float);
    mOutputConverter = std::make_unique<FormatConverterBox>(bufferSize,
                                                            oboe::AudioFormat::Float,
                                                            getOutputStream()->getFormat());

    oboe::Result result = getInputStream()->requestStart();
    if (result != oboe::Result::OK) {
        return result;
    }
    return getOutputStream()->requestStart();
}
/**
 * Stop both streams.
 * The output stream's stop result is currently discarded (see TODO);
 * only the input stream's requestStop() result is returned.
 */
oboe::Result FullDuplexStream::stop() {
    getOutputStream()->requestStop(); // TODO result?
    return getInputStream()->requestStop();
}
int32_t FullDuplexStream::getMNumInputBurstsCushion() const {
return mNumInputBurstsCushion;
}
void FullDuplexStream::setMNumInputBurstsCushion(int32_t numBursts) {
FullDuplexStream::mNumInputBurstsCushion = numBursts;
}

View File

@ -0,0 +1,119 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_FULL_DUPLEX_STREAM_H
#define OBOETESTER_FULL_DUPLEX_STREAM_H
#include <unistd.h>
#include <sys/types.h>
#include "oboe/Oboe.h"
#include "FormatConverterBox.h"
/**
 * Base class that couples an input and an output Oboe stream and drives
 * both from the output stream's data callback. Subclasses implement
 * onBothStreamsReady() to process input samples and produce output.
 *
 * The streams are not owned by this class; callers set non-owning
 * pointers via setInputStream()/setOutputStream().
 */
class FullDuplexStream : public oboe::AudioStreamCallback {
public:
    FullDuplexStream() {}
    virtual ~FullDuplexStream() = default;
    // Non-owning; the stream must outlive this object.
    void setInputStream(oboe::AudioStream *stream) {
        mInputStream = stream;
    }
    oboe::AudioStream *getInputStream() {
        return mInputStream;
    }
    // Non-owning; the stream must outlive this object.
    void setOutputStream(oboe::AudioStream *stream) {
        mOutputStream = stream;
    }
    oboe::AudioStream *getOutputStream() {
        return mOutputStream;
    }
    virtual oboe::Result start();
    virtual oboe::Result stop();
    // Read up to numFrames from the input stream (non-blocking, timeout 0).
    oboe::ResultWithValue<int32_t> readInput(int32_t numFrames);
    /**
     * Called when data is available on both streams.
     * Caller should override this method.
     */
    virtual oboe::DataCallbackResult onBothStreamsReady(
            const float *inputData,
            int numInputFrames,
            float *outputData,
            int numOutputFrames
            ) = 0;
    /**
     * Called by Oboe when the stream is ready to process audio.
     */
    oboe::DataCallbackResult onAudioReady(
            oboe::AudioStream *audioStream,
            void *audioData,
            int numFrames) override;
    int32_t getMNumInputBurstsCushion() const;
    /**
     * Number of bursts to leave in the input buffer as a cushion.
     * Typically 0 for latency measurements
     * or 1 for glitch tests.
     *
     * @param mNumInputBurstsCushion
     */
    void setMNumInputBurstsCushion(int32_t mNumInputBurstsCushion);
    // Minimum input frames that must be available before reading.
    void setMinimumFramesBeforeRead(int32_t numFrames) {
        mMinimumFramesBeforeRead = numFrames;
    }
    int32_t getMinimumFramesBeforeRead() const {
        return mMinimumFramesBeforeRead;
    }
private:
    // TODO add getters and setters
    static constexpr int32_t kNumCallbacksToDrain = 20;
    static constexpr int32_t kNumCallbacksToDiscard = 30;
    // let input fill back up, usually 0 or 1
    int32_t mNumInputBurstsCushion = 0;
    int32_t mMinimumFramesBeforeRead = 0;
    // We want to reach a state where the input buffer is empty and
    // the output buffer is full.
    // These are used in order.
    // Drain several callback so that input is empty.
    int32_t mCountCallbacksToDrain = kNumCallbacksToDrain;
    // Let the input fill back up slightly so we don't run dry.
    int32_t mCountInputBurstsCushion = mNumInputBurstsCushion;
    // Discard some callbacks so the input and output reach equilibrium.
    int32_t mCountCallbacksToDiscard = kNumCallbacksToDiscard;
    // Non-owning stream pointers supplied by the caller.
    oboe::AudioStream *mInputStream = nullptr;
    oboe::AudioStream *mOutputStream = nullptr;
    // Owned converters, (re)allocated in start().
    std::unique_ptr<FormatConverterBox> mInputConverter;
    std::unique_ptr<FormatConverterBox> mOutputConverter;
};
#endif //OBOETESTER_FULL_DUPLEX_STREAM_H

View File

@ -0,0 +1,42 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include "InputStreamCallbackAnalyzer.h"
/**
 * Analyze one buffer of recorded audio.
 *
 * Converts the native-format input buffer to float, appends it to the
 * optional recording, and feeds every sample to the per-channel peak
 * detectors. Always tells Oboe to continue the stream.
 */
oboe::DataCallbackResult InputStreamCallbackAnalyzer::onAudioReady(
        oboe::AudioStream *audioStream,
        void *audioData,
        int numFrames) {
    int32_t channelCount = audioStream->getChannelCount();
    printScheduler();
    mInputConverter->convertToInternalOutput(numFrames * channelCount, audioData);
    float *floatData = (float *) mInputConverter->getOutputBuffer();
    if (mRecording != nullptr) {
        mRecording->write(floatData, numFrames);
    }
    // Samples are interleaved: frame-major, channel-minor.
    int32_t sampleIndex = 0;
    for (int iFrame = 0; iFrame < numFrames; iFrame++) {
        for (int iChannel = 0; iChannel < channelCount; iChannel++) {
            float sample = floatData[sampleIndex++];
            mPeakDetectors[iChannel].process(sample);
        }
    }
    // Block (up to one second) until the requested minimum number of frames
    // is available before the next callback reads.
    audioStream->waitForAvailableFrames(mMinimumFramesBeforeRead, oboe::kNanosPerSecond);
    return oboe::DataCallbackResult::Continue;
}

View File

@ -0,0 +1,87 @@
/*
* Copyright 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_INPUTSTREAMCALLBACKANALYZER_H
#define NATIVEOBOE_INPUTSTREAMCALLBACKANALYZER_H
#include <unistd.h>
#include <sys/types.h>
// TODO #include "flowgraph/FlowGraph.h"
#include "oboe/Oboe.h"
#include "analyzer/PeakDetector.h"
#include "FormatConverterBox.h"
#include "MultiChannelRecording.h"
#include "OboeTesterStreamCallback.h"
constexpr int kMaxInputChannels = 8;

/**
 * Stream callback that analyzes incoming audio: converts it to float,
 * optionally appends it to a MultiChannelRecording, and tracks the peak
 * level of each channel with a PeakDetector.
 */
class InputStreamCallbackAnalyzer : public OboeTesterStreamCallback {
public:
    /** Reset all peak detectors and the base callback state. */
    void reset() {
        // BUG FIX: iterate by reference. The original iterated by value,
        // which reset a temporary copy and left the detectors' levels intact.
        for (auto &detector : mPeakDetectors) {
            detector.reset();
        }
        OboeTesterStreamCallback::reset();
    }
    /**
     * Allocate the input-format converter.
     *
     * @param maxFramesPerCallback largest callback size expected
     * @param channelCount samples per frame
     * @param inputFormat native format of the input stream
     */
    void setup(int32_t maxFramesPerCallback,
               int32_t channelCount,
               oboe::AudioFormat inputFormat) {
        int32_t bufferSize = maxFramesPerCallback * channelCount;
        mInputConverter = std::make_unique<FormatConverterBox>(bufferSize,
                                                               inputFormat,
                                                               oboe::AudioFormat::Float);
    }
    /**
     * Called by Oboe when the stream is ready to process audio.
     */
    oboe::DataCallbackResult onAudioReady(
            oboe::AudioStream *audioStream,
            void *audioData,
            int numFrames) override;
    // Non-owning; pass nullptr to stop recording.
    void setRecording(MultiChannelRecording *recording) {
        mRecording = recording;
    }
    // Current peak level for one channel (index < kMaxInputChannels).
    double getPeakLevel(int index) {
        return mPeakDetectors[index].getLevel();
    }
    void setMinimumFramesBeforeRead(int32_t numFrames) {
        mMinimumFramesBeforeRead = numFrames;
    }
    int32_t getMinimumFramesBeforeRead() {
        return mMinimumFramesBeforeRead;
    }
public:
    PeakDetector mPeakDetectors[kMaxInputChannels];
    MultiChannelRecording *mRecording = nullptr;
private:
    std::unique_ptr<FormatConverterBox> mInputConverter;
    int32_t mMinimumFramesBeforeRead = 0;
};
#endif //NATIVEOBOE_INPUTSTREAMCALLBACKANALYZER_H

View File

@ -0,0 +1,42 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include "InterpolatingDelayLine.h"
// Construct a delay line holding delaySize samples.
// make_unique<float[]> value-initializes, so the line starts out silent.
InterpolatingDelayLine::InterpolatingDelayLine(int32_t delaySize)
        : mDelayLine(std::make_unique<float[]>(delaySize))
        , mDelaySize(delaySize) {
}
/**
 * Write one sample into the circular delay line and read back the
 * delayed sample.
 *
 * @param delay number of samples to delay the output; truncated to an
 *              integer and clamped to the line size (no interpolation yet)
 * @param input sample to be written to the delay line
 * @return the delayed sample
 */
float InterpolatingDelayLine::process(float delay, float input) {
    // Store the incoming sample at the cursor. (The original stored it
    // twice, once through a pointer and once by index — redundant.)
    mDelayLine.get()[mCursor] = input;
    // Truncate the fractional delay and clamp so the read never leaves
    // the buffer.
    int32_t delayInt = std::min(mDelaySize - 1, (int32_t) delay);
    int32_t readIndex = mCursor - delayInt;
    if (readIndex < 0) {
        readIndex += mDelaySize; // wrap around the circular buffer
    }
    // TODO interpolate between adjacent samples using the fractional part
    float output = mDelayLine.get()[readIndex];
    mCursor++;
    if (mCursor >= mDelaySize) {
        mCursor = 0;
    }
    return output;
}

View File

@ -0,0 +1,48 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_INTERPOLATING_DELAY_LINE_H
#define OBOETESTER_INTERPOLATING_DELAY_LINE_H
#include <memory>
#include <unistd.h>
#include <sys/types.h>
#include "oboe/Oboe.h"
#include "FullDuplexStream.h"
/**
* Monophonic delay line.
*/
/**
 * Monophonic delay line.
 */
class InterpolatingDelayLine {
public:
    // Allocates a circular buffer of delaySize samples.
    explicit InterpolatingDelayLine(int32_t delaySize);
    /**
     * @param input sample to be written to the delay line
     * @param delay number of samples to delay the output
     * @return delayed value
     */
    float process(float delay, float input);
private:
    // Owned circular sample buffer of mDelaySize floats.
    std::unique_ptr<float[]> mDelayLine;
    // Next write position; wraps at mDelaySize.
    int32_t mCursor = 0;
    int32_t mDelaySize = 0;
};
#endif //OBOETESTER_INTERPOLATING_DELAY_LINE_H

View File

@ -0,0 +1,161 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_MULTICHANNEL_RECORDING_H
#define NATIVEOBOE_MULTICHANNEL_RECORDING_H
#include <memory.h>
#include <unistd.h>
#include <sys/types.h>

#include <algorithm>
#include <cstdint>
/**
* Store multi-channel audio data in float format.
* The most recent data will be saved.
* Old data may be overwritten.
*
* Note that this is not thread safe. Do not read and write from separate threads.
*/
/**
 * Store multi-channel audio data in float format.
 * The most recent data will be saved.
 * Old data may be overwritten.
 *
 * Note that this is not thread safe. Do not read and write from separate threads.
 */
class MultiChannelRecording {
public:
    /**
     * @param channelCount samples per frame
     * @param maxFrames capacity of the circular buffer, in frames
     */
    MultiChannelRecording(int32_t channelCount, int32_t maxFrames)
            : mChannelCount(channelCount)
            , mMaxFrames(maxFrames) {
        mData = new float[channelCount * maxFrames];
    }
    ~MultiChannelRecording() {
        delete[] mData;
    }
    // mData is an owned raw array with a user-declared destructor, so
    // copying would double-delete it — forbid copies (Rule of Three).
    MultiChannelRecording(const MultiChannelRecording &) = delete;
    MultiChannelRecording &operator=(const MultiChannelRecording &) = delete;
    // Move the read cursor back to the oldest retained frame.
    void rewind() {
        mReadCursorFrames = mWriteCursorFrames - getSizeInFrames();
    }
    void clear() {
        mReadCursorFrames = 0;
        mWriteCursorFrames = 0;
    }
    int32_t getChannelCount() {
        return mChannelCount;
    }
    // Number of frames currently retained (at most mMaxFrames).
    int32_t getSizeInFrames() {
        return (int32_t) std::min(mWriteCursorFrames, static_cast<int64_t>(mMaxFrames));
    }
    int32_t getReadIndex() {
        return mReadCursorFrames % mMaxFrames;
    }
    int32_t getWriteIndex() {
        return mWriteCursorFrames % mMaxFrames;
    }
    /**
     * Write numFrames from the short buffer into the recording.
     * Overwrite old data if necessary.
     * Convert shorts to floats.
     *
     * @param buffer
     * @param numFrames
     * @return number of frames actually written.
     */
    int32_t write(int16_t *buffer, int32_t numFrames) {
        int32_t framesLeft = numFrames;
        while (framesLeft > 0) {
            int32_t indexFrame = getWriteIndex();
            // contiguous writes
            int32_t framesToEndOfBuffer = mMaxFrames - indexFrame;
            int32_t framesNow = std::min(framesLeft, framesToEndOfBuffer);
            int32_t numSamples = framesNow * mChannelCount;
            int32_t sampleIndex = indexFrame * mChannelCount;
            for (int i = 0; i < numSamples; i++) {
                // Scale int16 full range to [-1.0, 1.0).
                mData[sampleIndex++] = *buffer++ * (1.0f / 32768);
            }
            mWriteCursorFrames += framesNow;
            framesLeft -= framesNow;
        }
        return numFrames - framesLeft;
    }
    /**
     * Write all numFrames from the float buffer into the recording.
     * Overwrite old data if full.
     * @param buffer
     * @param numFrames
     * @return number of frames actually written.
     */
    int32_t write(float *buffer, int32_t numFrames) {
        int32_t framesLeft = numFrames;
        while (framesLeft > 0) {
            int32_t indexFrame = getWriteIndex();
            // contiguous writes
            int32_t framesToEnd = mMaxFrames - indexFrame;
            int32_t framesNow = std::min(framesLeft, framesToEnd);
            int32_t numSamples = framesNow * mChannelCount;
            int32_t sampleIndex = indexFrame * mChannelCount;
            memcpy(&mData[sampleIndex],
                   buffer,
                   (numSamples * sizeof(float)));
            buffer += numSamples;
            mWriteCursorFrames += framesNow;
            framesLeft -= framesNow;
        }
        return numFrames;
    }
    /**
     * Read numFrames from the recording into the buffer, if there is enough data.
     * Start at the cursor position, aligned up to the next frame.
     * @param buffer
     * @param numFrames
     * @return number of frames actually read.
     */
    int32_t read(float *buffer, int32_t numFrames) {
        int32_t framesRead = 0;
        // Never read more than is buffered or than the capacity holds.
        int32_t framesLeft = std::min(numFrames,
                std::min(mMaxFrames, (int32_t)(mWriteCursorFrames - mReadCursorFrames)));
        while (framesLeft > 0) {
            int32_t indexFrame = getReadIndex();
            // contiguous reads
            int32_t framesToEnd = mMaxFrames - indexFrame;
            int32_t framesNow = std::min(framesLeft, framesToEnd);
            int32_t numSamples = framesNow * mChannelCount;
            int32_t sampleIndex = indexFrame * mChannelCount;
            memcpy(buffer,
                   &mData[sampleIndex],
                   (numSamples * sizeof(float)));
            mReadCursorFrames += framesNow;
            framesLeft -= framesNow;
            framesRead += framesNow;
        }
        return framesRead;
    }
private:
    float *mData = nullptr;            // owned sample storage, interleaved
    int64_t mReadCursorFrames = 0;
    int64_t mWriteCursorFrames = 0;    // monotonically increasing
    const int32_t mChannelCount;
    const int32_t mMaxFrames;
};
#endif //NATIVEOBOE_MULTICHANNEL_RECORDING_H

View File

@ -0,0 +1,702 @@
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Set to 1 for debugging race condition #1180 with mAAudioStream.
// See also AudioStreamAAudio.cpp in Oboe.
// This was left in the code so that we could test the fix again easily in the future.
// We could not trigger the race condition without adding these get calls and the sleeps.
#define DEBUG_CLOSE_RACE 0
#include <fstream>
#include <iostream>
#if DEBUG_CLOSE_RACE
#include <thread>
#endif // DEBUG_CLOSE_RACE
#include <vector>
#include <common/AudioClock.h>
#include "util/WaveFileWriter.h"
#include "NativeAudioContext.h"
using namespace oboe;
// Map a NATIVE_MODE_* constant from the Java layer to the Oboe AudioApi
// enum. Unknown values fall through to Unspecified.
static oboe::AudioApi convertNativeApiToAudioApi(int nativeApi) {
    switch (nativeApi) {
        default:
        case NATIVE_MODE_UNSPECIFIED:
            return oboe::AudioApi::Unspecified;
        case NATIVE_MODE_AAUDIO:
            return oboe::AudioApi::AAudio;
        case NATIVE_MODE_OPENSLES:
            return oboe::AudioApi::OpenSLES;
    }
}
/**
 * WaveFileOutputStream that accumulates the WAV image in memory so it
 * can be written to disk in one shot afterwards.
 */
class MyOboeOutputStream : public WaveFileOutputStream {
public:
    // Append a single byte to the in-memory image.
    void write(uint8_t b) override {
        mBytes.push_back(b);
    }
    // Number of bytes accumulated so far.
    int32_t length() {
        return static_cast<int32_t>(mBytes.size());
    }
    // Raw pointer to the accumulated bytes (valid until the next write).
    uint8_t *getData() {
        return mBytes.data();
    }
private:
    std::vector<uint8_t> mBytes;
};
// Shared across all activities: drive streams via Oboe callbacks (true)
// or via the blocking read/write thread (false).
bool ActivityContext::mUseCallback = true;
// Requested frames per callback; presumably 0 means "unspecified" — confirm
// against the Oboe setFramesPerCallback() documentation.
int ActivityContext::callbackSize = 0;
std::shared_ptr<oboe::AudioStream> ActivityContext::getOutputStream() {
for (auto entry : mOboeStreams) {
std::shared_ptr<oboe::AudioStream> oboeStream = entry.second;
if (oboeStream->getDirection() == oboe::Direction::Output) {
return oboeStream;
}
}
return nullptr;
}
std::shared_ptr<oboe::AudioStream> ActivityContext::getInputStream() {
for (auto entry : mOboeStreams) {
std::shared_ptr<oboe::AudioStream> oboeStream = entry.second;
if (oboeStream != nullptr) {
if (oboeStream->getDirection() == oboe::Direction::Input) {
return oboeStream;
}
}
}
return nullptr;
}
// Release our reference to the stream and remove its slot from the map.
void ActivityContext::freeStreamIndex(int32_t streamIndex) {
    auto &slot = mOboeStreams[streamIndex];
    slot.reset();
    mOboeStreams.erase(streamIndex);
}
// Hand out stream indices monotonically; indices are never reused.
int32_t ActivityContext::allocateStreamIndex() {
    int32_t index = mNextStreamHandle;
    ++mNextStreamHandle;
    return index;
}
/**
 * Stop the blocking-I/O thread (if any), close the stream with the given
 * index, and free its slot. Unknown indices are ignored.
 */
void ActivityContext::close(int32_t streamIndex) {
    stopBlockingIOThread();
    std::shared_ptr<oboe::AudioStream> oboeStream = getStream(streamIndex);
    if (oboeStream != nullptr) {
        oboeStream->close();
        LOGD("ActivityContext::%s() delete stream %d ", __func__, streamIndex);
        freeStreamIndex(streamIndex);
    }
}
bool ActivityContext::isMMapUsed(int32_t streamIndex) {
std::shared_ptr<oboe::AudioStream> oboeStream = getStream(streamIndex);
if (oboeStream == nullptr) return false;
if (oboeStream->getAudioApi() != AudioApi::AAudio) return false;
return AAudioExtensions::getInstance().isMMapUsed(oboeStream.get());
}
/**
 * Stop the blocking-I/O thread and request pause on every open stream.
 * @return the result of the last requestPause() call (OK if no streams)
 */
oboe::Result ActivityContext::pause() {
    oboe::Result result = oboe::Result::OK;
    stopBlockingIOThread();
    // const& avoids copying a shared_ptr (atomic refcount) per entry.
    for (const auto &entry : mOboeStreams) {
        const std::shared_ptr<oboe::AudioStream> &oboeStream = entry.second;
        result = oboeStream->requestPause();
    }
    return result;
}
/**
 * Stop the blocking-I/O thread and request stop on every open stream.
 * @return the result of the last requestStop() call (OK if no streams)
 */
oboe::Result ActivityContext::stopAllStreams() {
    oboe::Result result = oboe::Result::OK;
    stopBlockingIOThread();
    // const& avoids copying a shared_ptr (atomic refcount) per entry.
    for (const auto &entry : mOboeStreams) {
        const std::shared_ptr<oboe::AudioStream> &oboeStream = entry.second;
        result = oboeStream->requestStop();
    }
    return result;
}
/**
 * Base hook for per-activity builder configuration; attaches the callback
 * proxy when callback mode is enabled. isInput is unused here but used by
 * overrides in subclasses.
 */
void ActivityContext::configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) {
    // We needed the proxy because we did not know the channelCount when we setup the Builder.
    if (mUseCallback) {
        builder.setDataCallback(&oboeCallbackProxy);
    }
}
/**
 * Open a new Oboe stream with the given parameters (passed straight
 * through from the Java layer) and register it under a fresh index.
 *
 * The AAudio MMAP policy is temporarily toggled around openStream() and
 * restored afterwards. In non-callback mode a staging buffer for the
 * blocking-I/O thread is also allocated.
 *
 * @return the non-negative stream index on success, or a negative
 *         oboe::Result error code cast to int
 */
int ActivityContext::open(jint nativeApi,
                          jint sampleRate,
                          jint channelCount,
                          jint format,
                          jint sharingMode,
                          jint performanceMode,
                          jint inputPreset,
                          jint usage,
                          jint deviceId,
                          jint sessionId,
                          jint framesPerBurst,
                          jboolean channelConversionAllowed,
                          jboolean formatConversionAllowed,
                          jint rateConversionQuality,
                          jboolean isMMap,
                          jboolean isInput) {
    oboe::AudioApi audioApi = oboe::AudioApi::Unspecified;
    switch (nativeApi) {
        case NATIVE_MODE_UNSPECIFIED:
        case NATIVE_MODE_AAUDIO:
        case NATIVE_MODE_OPENSLES:
            audioApi = convertNativeApiToAudioApi(nativeApi);
            break;
        default:
            return (jint) oboe::Result::ErrorOutOfRange;
    }
    int32_t streamIndex = allocateStreamIndex();
    if (streamIndex < 0) {
        LOGE("ActivityContext::open() stream array full");
        return (jint) oboe::Result::ErrorNoFreeHandles;
    }
    // Sanity limit on channel count before handing it to the builder.
    if (channelCount < 0 || channelCount > 256) {
        LOGE("ActivityContext::open() channels out of range");
        return (jint) oboe::Result::ErrorOutOfRange;
    }
    // Create an audio stream.
    oboe::AudioStreamBuilder builder;
    builder.setChannelCount(channelCount)
            ->setDirection(isInput ? oboe::Direction::Input : oboe::Direction::Output)
            ->setSharingMode((oboe::SharingMode) sharingMode)
            ->setPerformanceMode((oboe::PerformanceMode) performanceMode)
            ->setInputPreset((oboe::InputPreset)inputPreset)
            ->setUsage((oboe::Usage)usage)
            ->setDeviceId(deviceId)
            ->setSessionId((oboe::SessionId) sessionId)
            ->setSampleRate(sampleRate)
            ->setFormat((oboe::AudioFormat) format)
            ->setChannelConversionAllowed(channelConversionAllowed)
            ->setFormatConversionAllowed(formatConversionAllowed)
            ->setSampleRateConversionQuality((oboe::SampleRateConversionQuality) rateConversionQuality)
            ;
    if (mUseCallback) {
        builder.setFramesPerCallback(callbackSize);
    }
    // Let the concrete activity adjust the builder (callbacks, capacity, ...).
    configureBuilder(isInput, builder);
    builder.setAudioApi(audioApi);
    // Temporarily set the AAudio MMAP policy to disable MMAP or not.
    bool oldMMapEnabled = AAudioExtensions::getInstance().isMMapEnabled();
    AAudioExtensions::getInstance().setMMapEnabled(isMMap);
    // Record time for opening.
    if (isInput) {
        mInputOpenedAt = oboe::AudioClock::getNanoseconds();
    } else {
        mOutputOpenedAt = oboe::AudioClock::getNanoseconds();
    }
    // Open a stream based on the builder settings.
    std::shared_ptr<oboe::AudioStream> oboeStream;
    Result result = builder.openStream(oboeStream);
    // Restore the global MMAP policy regardless of the open result.
    AAudioExtensions::getInstance().setMMapEnabled(oldMMapEnabled);
    if (result != Result::OK) {
        freeStreamIndex(streamIndex);
        streamIndex = -1;
    } else {
        mOboeStreams[streamIndex] = oboeStream; // save shared_ptr
        mChannelCount = oboeStream->getChannelCount(); // FIXME store per stream
        mFramesPerBurst = oboeStream->getFramesPerBurst();
        mSampleRate = oboeStream->getSampleRate();
        createRecording();
        finishOpen(isInput, oboeStream.get());
    }
    if (!mUseCallback) {
        // Staging buffer for the blocking read/write thread.
        int numSamples = getFramesPerBlock() * mChannelCount;
        dataBuffer = std::make_unique<float[]>(numSamples);
    }
    return (result != Result::OK) ? (int)result : streamIndex;
}
/**
 * Configure the activity and start all its streams.
 * In non-callback mode a worker thread is spawned to drive blocking I/O.
 *
 * @return ErrorInvalidState if no stream is open, otherwise the result
 *         of startStreams()
 */
oboe::Result ActivityContext::start() {
    oboe::Result result = oboe::Result::OK;
    std::shared_ptr<oboe::AudioStream> inputStream = getInputStream();
    std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
    if (inputStream == nullptr && outputStream == nullptr) {
        LOGD("%s() - no streams defined", __func__);
        return oboe::Result::ErrorInvalidState; // not open
    }
    configureForStart();
    audioStreamGateway.reset();
    result = startStreams();
    if (!mUseCallback && result == oboe::Result::OK) {
        // Instead of using the callback, start a thread that writes the stream.
        threadEnabled.store(true);
        dataThread = new std::thread(threadCallback, this);
    }
#if DEBUG_CLOSE_RACE
    // Debug-only: hammer getFramesRead() from another thread to reproduce
    // the close race described at the top of this file (#1180).
    // Also put a sleep for 400 msec in AudioStreamAAudio::updateFramesRead().
    if (outputStream != nullptr) {
        std::thread raceDebugger([outputStream]() {
            while (outputStream->getState() != StreamState::Closed) {
                int64_t framesRead = outputStream->getFramesRead();
                LOGD("raceDebugger, framesRead = %d, state = %d",
                     (int) framesRead, (int) outputStream->getState());
            }
        });
        raceDebugger.detach();
    }
#endif // DEBUG_CLOSE_RACE
    return result;
}
/**
 * Save the captured recording as a 24-bit WAV file.
 *
 * @param filename destination path
 * @return number of bytes written, -1 if there is no recording,
 *         -2 if the recording is empty
 */
int32_t ActivityContext::saveWaveFile(const char *filename) {
    if (mRecording == nullptr) {
        LOGW("ActivityContext::saveWaveFile(%s) but no recording!", filename);
        return -1;
    }
    if (mRecording->getSizeInFrames() == 0) {
        LOGW("ActivityContext::saveWaveFile(%s) but no frames!", filename);
        return -2;
    }
    MyOboeOutputStream outStream;
    WaveFileWriter writer(&outStream);
    writer.setFrameRate(mSampleRate);
    writer.setSamplesPerFrame(mRecording->getChannelCount());
    writer.setBitsPerSample(24);
    // One frame of samples. std::vector instead of the original
    // variable-length array, which is a compiler extension rather than
    // standard C++.
    std::vector<float> buffer(mRecording->getChannelCount());
    // Read samples from start to finish.
    mRecording->rewind();
    for (int32_t frameIndex = 0; frameIndex < mRecording->getSizeInFrames(); frameIndex++) {
        mRecording->read(buffer.data(), 1 /* numFrames */);
        for (int32_t i = 0; i < mRecording->getChannelCount(); i++) {
            writer.write(buffer[i]);
        }
    }
    writer.close();
    if (outStream.length() > 0) {
        auto myfile = std::ofstream(filename, std::ios::out | std::ios::binary);
        myfile.write((char *) outStream.getData(), outStream.length());
        myfile.close();
    }
    return outStream.length();
}
double ActivityContext::getTimestampLatency(int32_t streamIndex) {
std::shared_ptr<oboe::AudioStream> oboeStream = getStream(streamIndex);
if (oboeStream != nullptr) {
auto result = oboeStream->calculateLatencyMillis();
return (!result) ? -1.0 : result.value();
}
return -1.0;
}
// =================================================================== ActivityTestOutput
/**
 * Close the stream, then tear down the flowgraph nodes so the next run
 * starts from a clean slate.
 */
void ActivityTestOutput::close(int32_t streamIndex) {
    ActivityContext::close(streamIndex);
    manyToMulti.reset(nullptr);
    monoToMulti.reset(nullptr);
    mSinkFloat.reset();
    mSinkI16.reset();
    mSinkI24.reset();
    mSinkI32.reset();
}
/**
 * Connect (enable) or disconnect (disable) the signal source feeding one
 * output channel, according to the currently selected signal type.
 * No-op if the flowgraph has not been built yet.
 */
void ActivityTestOutput::setChannelEnabled(int channelIndex, bool enabled) {
    if (manyToMulti == nullptr) {
        return;
    }
    if (enabled) {
        switch (mSignalType) {
            case SignalType::Sine:
                // Fixed-frequency sine: detach any sweep driving the frequency.
                sineOscillators[channelIndex].frequency.disconnect();
                sineOscillators[channelIndex].output.connect(manyToMulti->inputs[channelIndex].get());
                break;
            case SignalType::Sawtooth:
                sawtoothOscillators[channelIndex].output.connect(manyToMulti->inputs[channelIndex].get());
                break;
            case SignalType::FreqSweep:
                // Linear ramp drives the sine frequency.
                mLinearShape.output.connect(&sineOscillators[channelIndex].frequency);
                sineOscillators[channelIndex].output.connect(manyToMulti->inputs[channelIndex].get());
                break;
            case SignalType::PitchSweep:
                // Exponential ramp drives the sine frequency.
                mExponentialShape.output.connect(&sineOscillators[channelIndex].frequency);
                sineOscillators[channelIndex].output.connect(manyToMulti->inputs[channelIndex].get());
                break;
            default:
                break;
        }
    } else {
        manyToMulti->inputs[channelIndex]->disconnect();
    }
}
/**
 * Build the output flowgraph: per-channel oscillators feeding a
 * many-to-multi converter, fanned out into one sink per sample format.
 * Sweep shapers are driven by a slow triangle LFO.
 */
void ActivityTestOutput::configureForStart() {
    manyToMulti = std::make_unique<ManyToMultiConverter>(mChannelCount);
    mSinkFloat = std::make_shared<SinkFloat>(mChannelCount);
    mSinkI16 = std::make_shared<SinkI16>(mChannelCount);
    mSinkI24 = std::make_shared<SinkI24>(mChannelCount);
    mSinkI32 = std::make_shared<SinkI32>(mChannelCount);
    std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
    // Slow triangle LFO that drives both sweep shapers.
    mTriangleOscillator.setSampleRate(outputStream->getSampleRate());
    mTriangleOscillator.frequency.setValue(1.0/kSweepPeriod);
    mTriangleOscillator.amplitude.setValue(1.0);
    mTriangleOscillator.setPhase(-1.0);
    mLinearShape.setMinimum(0.0);
    mLinearShape.setMaximum(outputStream->getSampleRate() * 0.5); // Nyquist
    mExponentialShape.setMinimum(110.0);
    mExponentialShape.setMaximum(outputStream->getSampleRate() * 0.5); // Nyquist
    mTriangleOscillator.output.connect(&(mLinearShape.input));
    mTriangleOscillator.output.connect(&(mExponentialShape.input));
    {
        double frequency = 330.0;
        for (int i = 0; i < mChannelCount; i++) {
            sineOscillators[i].setSampleRate(outputStream->getSampleRate());
            sineOscillators[i].frequency.setValue(frequency);
            frequency *= 4.0 / 3.0; // each sine is at a higher frequency
            sineOscillators[i].amplitude.setValue(AMPLITUDE_SINE);
            setChannelEnabled(i, true);
        }
    }
    // Fan the mixed signal out to every sink; only one is used per run,
    // chosen by configureStreamGateway() to match the stream format.
    manyToMulti->output.connect(&(mSinkFloat.get()->input));
    manyToMulti->output.connect(&(mSinkI16.get()->input));
    manyToMulti->output.connect(&(mSinkI24.get()->input));
    manyToMulti->output.connect(&(mSinkI32.get()->input));
    mSinkFloat->pullReset();
    mSinkI16->pullReset();
    mSinkI24->pullReset();
    mSinkI32->pullReset();
    configureStreamGateway();
}
// Select the flowgraph sink that matches the output stream's sample
// format, and attach the gateway to the callback proxy if callbacks are on.
void ActivityTestOutput::configureStreamGateway() {
    std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
    switch (outputStream->getFormat()) {
        case oboe::AudioFormat::I16:
            audioStreamGateway.setAudioSink(mSinkI16);
            break;
        case oboe::AudioFormat::I24:
            audioStreamGateway.setAudioSink(mSinkI24);
            break;
        case oboe::AudioFormat::I32:
            audioStreamGateway.setAudioSink(mSinkI32);
            break;
        case oboe::AudioFormat::Float:
            audioStreamGateway.setAudioSink(mSinkFloat);
            break;
        default:
            // Other formats: leave the sink unset, as the original did.
            break;
    }
    if (mUseCallback) {
        oboeCallbackProxy.setCallback(&audioStreamGateway);
    }
}
/**
 * Worker-thread loop for non-callback mode: repeatedly pull one block of
 * audio from the gateway and write it to the output stream, until the
 * thread is disabled, the gateway stops, or a write fails/short-writes.
 */
void ActivityTestOutput::runBlockingIO() {
    int32_t framesPerBlock = getFramesPerBlock();
    oboe::DataCallbackResult callbackResult = oboe::DataCallbackResult::Continue;
    std::shared_ptr<oboe::AudioStream> oboeStream = getOutputStream();
    if (oboeStream == nullptr) {
        LOGE("%s() : no stream found\n", __func__);
        return;
    }
    while (threadEnabled.load()
           && callbackResult == oboe::DataCallbackResult::Continue) {
        // generate output by calling the callback
        callbackResult = audioStreamGateway.onAudioReady(oboeStream.get(),
                                                         dataBuffer.get(),
                                                         framesPerBlock);
        auto result = oboeStream->write(dataBuffer.get(),
                                        framesPerBlock,
                                        NANOS_PER_SECOND);
        if (!result) {
            LOGE("%s() returned %s\n", __func__, convertToText(result.error()));
            break;
        }
        int32_t framesWritten = result.value();
        // A short write means the one-second timeout expired.
        if (framesWritten < framesPerBlock) {
            LOGE("%s() : write() wrote %d of %d\n", __func__, framesWritten, framesPerBlock);
            break;
        }
    }
}
// ======================================================================= ActivityTestInput
/**
 * Reset the input analyzer, route stream callbacks through it (when in
 * callback mode), and point it at the recording buffer.
 */
void ActivityTestInput::configureForStart() {
    mInputAnalyzer.reset();
    if (mUseCallback) {
        oboeCallbackProxy.setCallback(&mInputAnalyzer);
    }
    mInputAnalyzer.setRecording(mRecording.get());
}
/**
 * Worker-thread loop for non-callback input mode: wait for a cushion of
 * frames, read one block, and hand it to the analyzer. Exits when the
 * thread is disabled, the analyzer stops, or a read fails/short-reads.
 */
void ActivityTestInput::runBlockingIO() {
    int32_t framesPerBlock = getFramesPerBlock();
    oboe::DataCallbackResult callbackResult = oboe::DataCallbackResult::Continue;
    std::shared_ptr<oboe::AudioStream> oboeStream = getInputStream();
    if (oboeStream == nullptr) {
        LOGE("%s() : no stream found\n", __func__);
        return;
    }
    while (threadEnabled.load()
           && callbackResult == oboe::DataCallbackResult::Continue) {
        // Avoid glitches by waiting until there is extra data in the FIFO.
        auto err = oboeStream->waitForAvailableFrames(mMinimumFramesBeforeRead, kNanosPerSecond);
        if (!err) break;
        // read from input
        auto result = oboeStream->read(dataBuffer.get(),
                                       framesPerBlock,
                                       NANOS_PER_SECOND);
        if (!result) {
            LOGE("%s() : read() returned %s\n", __func__, convertToText(result.error()));
            break;
        }
        int32_t framesRead = result.value();
        if (framesRead < framesPerBlock) { // timeout?
            LOGE("%s() : read() read %d of %d\n", __func__, framesRead, framesPerBlock);
            break;
        }
        // analyze input
        callbackResult = mInputAnalyzer.onAudioReady(oboeStream.get(),
                                                     dataBuffer.get(),
                                                     framesRead);
    }
}
/**
 * Stop and tear down the playback stream, detaching it from the
 * recording callback.
 *
 * NOTE(review): playbackStream is a raw owning pointer deleted here and
 * in startPlayback() — confirm ownership against the legacy
 * AudioStreamBuilder::openStream(AudioStream**) API before changing.
 *
 * @return result of requestStop(), or OK if no stream was playing
 */
oboe::Result ActivityRecording::stopPlayback() {
    oboe::Result result = oboe::Result::OK;
    if (playbackStream != nullptr) {
        result = playbackStream->requestStop();
        playbackStream->close();
        mPlayRecordingCallback.setRecording(nullptr);
        delete playbackStream;
        playbackStream = nullptr;
    }
    return result;
}
/**
 * Open an OpenSL ES float output stream and play the captured recording
 * from its beginning through mPlayRecordingCallback.
 *
 * @return the open/start result; the stream is cleaned up on open failure
 */
oboe::Result ActivityRecording::startPlayback() {
    stop();
    oboe::AudioStreamBuilder builder;
    builder.setChannelCount(mChannelCount)
            ->setSampleRate(mSampleRate)
            ->setFormat(oboe::AudioFormat::Float)
            ->setCallback(&mPlayRecordingCallback)
            ->setAudioApi(oboe::AudioApi::OpenSLES);
    // Legacy raw-pointer openStream() overload; stream is owned here.
    oboe::Result result = builder.openStream(&playbackStream);
    if (result != oboe::Result::OK) {
        delete playbackStream;
        playbackStream = nullptr;
    } else if (playbackStream != nullptr) {
        if (mRecording != nullptr) {
            // Start from the oldest retained frame.
            mRecording->rewind();
            mPlayRecordingCallback.setRecording(mRecording.get());
            result = playbackStream->requestStart();
        }
    }
    return result;
}
// ======================================================================= ActivityTapToTone
/**
 * Build the tap-to-tone flowgraph: a saw-ping generator expanded from
 * mono to all channels, fanned out into one sink per sample format.
 */
void ActivityTapToTone::configureForStart() {
    monoToMulti = std::make_unique<MonoToMultiConverter>(mChannelCount);
    mSinkFloat = std::make_shared<SinkFloat>(mChannelCount);
    mSinkI16 = std::make_shared<SinkI16>(mChannelCount);
    mSinkI24 = std::make_shared<SinkI24>(mChannelCount);
    mSinkI32 = std::make_shared<SinkI32>(mChannelCount);
    std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
    sawPingGenerator.setSampleRate(outputStream->getSampleRate());
    sawPingGenerator.frequency.setValue(FREQUENCY_SAW_PING);
    sawPingGenerator.amplitude.setValue(AMPLITUDE_SAW_PING);
    sawPingGenerator.output.connect(&(monoToMulti->input));
    // Fan out to every sink; configureStreamGateway() picks the one
    // matching the stream's format.
    monoToMulti->output.connect(&(mSinkFloat.get()->input));
    monoToMulti->output.connect(&(mSinkI16.get()->input));
    monoToMulti->output.connect(&(mSinkI24.get()->input));
    monoToMulti->output.connect(&(mSinkI32.get()->input));
    mSinkFloat->pullReset();
    mSinkI16->pullReset();
    mSinkI24->pullReset();
    mSinkI32->pullReset();
    configureStreamGateway();
}
// ======================================================================= ActivityRoundTripLatency
// Give an input stream enough buffer capacity to keep up with the
// already-opened output stream.
void ActivityFullDuplex::configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) {
    if (!isInput) {
        return; // the output side needs no extra configuration here
    }
    // Ideally the output streams should be opened first.
    std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
    if (outputStream != nullptr) {
        // Make sure the capacity is bigger than two bursts.
        builder.setBufferCapacityInFrames(2 * outputStream->getFramesPerBurst());
    }
}
// ======================================================================= ActivityEcho
void ActivityEcho::configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) {
ActivityFullDuplex::configureBuilder(isInput, builder);
if (mFullDuplexEcho.get() == nullptr) {
mFullDuplexEcho = std::make_unique<FullDuplexEcho>();
}
// only output uses a callback, input is polled
if (!isInput) {
builder.setCallback((oboe::AudioStreamCallback *) &oboeCallbackProxy);
oboeCallbackProxy.setCallback(mFullDuplexEcho.get());
}
}
// Hand each newly opened stream to the echo processor.
void ActivityEcho::finishOpen(bool isInput, oboe::AudioStream *oboeStream) {
    if (!isInput) {
        mFullDuplexEcho->setOutputStream(oboeStream);
    } else {
        mFullDuplexEcho->setInputStream(oboeStream);
    }
}
// ======================================================================= ActivityRoundTripLatency
void ActivityRoundTripLatency::configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) {
ActivityFullDuplex::configureBuilder(isInput, builder);
if (mFullDuplexLatency.get() == nullptr) {
mFullDuplexLatency = std::make_unique<FullDuplexAnalyzer>(&mEchoAnalyzer);
}
if (!isInput) {
// only output uses a callback, input is polled
builder.setCallback((oboe::AudioStreamCallback *) &oboeCallbackProxy);
oboeCallbackProxy.setCallback(mFullDuplexLatency.get());
}
}
// Attach each newly opened stream to the latency analyzer; the input side
// also receives the recording buffer.
void ActivityRoundTripLatency::finishOpen(bool isInput, AudioStream *oboeStream) {
    if (!isInput) {
        mFullDuplexLatency->setOutputStream(oboeStream);
        return;
    }
    mFullDuplexLatency->setInputStream(oboeStream);
    mFullDuplexLatency->setRecording(mRecording.get());
}
// ======================================================================= ActivityGlitches
void ActivityGlitches::configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) {
ActivityFullDuplex::configureBuilder(isInput, builder);
if (mFullDuplexGlitches.get() == nullptr) {
mFullDuplexGlitches = std::make_unique<FullDuplexAnalyzer>(&mGlitchAnalyzer);
}
if (!isInput) {
// only output uses a callback, input is polled
builder.setCallback((oboe::AudioStreamCallback *) &oboeCallbackProxy);
oboeCallbackProxy.setCallback(mFullDuplexGlitches.get());
}
}
// Attach each newly opened stream to the glitch analyzer; the input side
// also receives the recording buffer.
void ActivityGlitches::finishOpen(bool isInput, oboe::AudioStream *oboeStream) {
    if (!isInput) {
        mFullDuplexGlitches->setOutputStream(oboeStream);
        return;
    }
    mFullDuplexGlitches->setInputStream(oboeStream);
    mFullDuplexGlitches->setRecording(mRecording.get());
}
// ======================================================================= ActivityDataPath
void ActivityDataPath::configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) {
ActivityFullDuplex::configureBuilder(isInput, builder);
if (mFullDuplexDataPath.get() == nullptr) {
mFullDuplexDataPath = std::make_unique<FullDuplexAnalyzer>(&mDataPathAnalyzer);
}
if (!isInput) {
// only output uses a callback, input is polled
builder.setCallback((oboe::AudioStreamCallback *) &oboeCallbackProxy);
oboeCallbackProxy.setCallback(mFullDuplexDataPath.get());
}
}
// Attach each newly opened stream to the data-path analyzer; the input side
// also receives the recording buffer.
void ActivityDataPath::finishOpen(bool isInput, oboe::AudioStream *oboeStream) {
    if (!isInput) {
        mFullDuplexDataPath->setOutputStream(oboeStream);
        return;
    }
    mFullDuplexDataPath->setInputStream(oboeStream);
    mFullDuplexDataPath->setRecording(mRecording.get());
}
// =================================================================== ActivityTestDisconnect
// Close the stream, then drop the float sink so the flowgraph is rebuilt
// from scratch on the next configureForStart().
void ActivityTestDisconnect::close(int32_t streamIndex) {
    ActivityContext::close(streamIndex);
    mSinkFloat.reset();
}
void ActivityTestDisconnect::configureForStart() {
std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
std::shared_ptr<oboe::AudioStream> inputStream = getInputStream();
if (outputStream) {
mSinkFloat = std::make_unique<SinkFloat>(mChannelCount);
sineOscillator = std::make_unique<SineOscillator>();
monoToMulti = std::make_unique<MonoToMultiConverter>(mChannelCount);
sineOscillator->setSampleRate(outputStream->getSampleRate());
sineOscillator->frequency.setValue(440.0);
sineOscillator->amplitude.setValue(AMPLITUDE_SINE);
sineOscillator->output.connect(&(monoToMulti->input));
monoToMulti->output.connect(&(mSinkFloat->input));
mSinkFloat->pullReset();
audioStreamGateway.setAudioSink(mSinkFloat);
} else if (inputStream) {
audioStreamGateway.setAudioSink(nullptr);
}
oboeCallbackProxy.setCallback(&audioStreamGateway);
}

View File

@ -0,0 +1,767 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_NATIVEAUDIOCONTEXT_H
#define NATIVEOBOE_NATIVEAUDIOCONTEXT_H
#include <jni.h>
#include <sys/system_properties.h>
#include <thread>
#include <unordered_map>
#include <vector>
#include "common/OboeDebug.h"
#include "oboe/Oboe.h"
#include "aaudio/AAudioExtensions.h"
#include "AudioStreamGateway.h"
#include "flowunits/ImpulseOscillator.h"
#include "flowgraph/ManyToMultiConverter.h"
#include "flowgraph/MonoToMultiConverter.h"
#include "flowgraph/SinkFloat.h"
#include "flowgraph/SinkI16.h"
#include "flowgraph/SinkI24.h"
#include "flowgraph/SinkI32.h"
#include "flowunits/ExponentialShape.h"
#include "flowunits/LinearShape.h"
#include "flowunits/SineOscillator.h"
#include "flowunits/SawtoothOscillator.h"
#include "flowunits/TriangleOscillator.h"
#include "FullDuplexAnalyzer.h"
#include "FullDuplexEcho.h"
#include "FullDuplexStream.h"
#include "analyzer/GlitchAnalyzer.h"
#include "analyzer/DataPathAnalyzer.h"
#include "InputStreamCallbackAnalyzer.h"
#include "MultiChannelRecording.h"
#include "OboeStreamCallbackProxy.h"
#include "PlayRecordingCallback.h"
#include "SawPingGenerator.h"
// These must match order in strings.xml and in StreamConfiguration.java
#define NATIVE_MODE_UNSPECIFIED 0
#define NATIVE_MODE_OPENSLES 1
#define NATIVE_MODE_AAUDIO 2
#define MAX_SINE_OSCILLATORS 8
#define AMPLITUDE_SINE 1.0
#define AMPLITUDE_SAWTOOTH 0.5
#define FREQUENCY_SAW_PING 800.0
#define AMPLITUDE_SAW_PING 0.8
#define AMPLITUDE_IMPULSE 0.7
#define NANOS_PER_MICROSECOND ((int64_t) 1000)
#define NANOS_PER_MILLISECOND (1000 * NANOS_PER_MICROSECOND)
#define NANOS_PER_SECOND (1000 * NANOS_PER_MILLISECOND)
#define SECONDS_TO_RECORD 10
/**
 * Abstract base class that corresponds to a test at the Java level.
 * Owns the Oboe streams for one test and provides shared plumbing:
 * open/close, start/stop, blocking-IO threading and latency helpers.
 */
class ActivityContext {
public:
    ActivityContext() {}
    virtual ~ActivityContext() = default;

    // Return the stream for the given handle, or nullptr if unknown/closed.
    std::shared_ptr<oboe::AudioStream> getStream(int32_t streamIndex) {
        auto it = mOboeStreams.find(streamIndex);
        if (it != mOboeStreams.end()) {
            return it->second;
        } else {
            return nullptr;
        }
    }

    virtual void configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder);

    /**
     * Open a stream with the given parameters.
     * @param nativeApi
     * @param sampleRate
     * @param channelCount
     * @param format
     * @param sharingMode
     * @param performanceMode
     * @param inputPreset
     * @param deviceId
     * @param sessionId
     * @param framesPerBurst
     * @param channelConversionAllowed
     * @param formatConversionAllowed
     * @param rateConversionQuality
     * @param isMMap
     * @param isInput
     * @return stream ID
     */
    int open(jint nativeApi,
             jint sampleRate,
             jint channelCount,
             jint format,
             jint sharingMode,
             jint performanceMode,
             jint inputPreset,
             jint usage,
             jint deviceId,
             jint sessionId,
             jint framesPerBurst,
             jboolean channelConversionAllowed,
             jboolean formatConversionAllowed,
             jint rateConversionQuality,
             jboolean isMMap,
             jboolean isInput);

    virtual void close(int32_t streamIndex);

    // Hook for subclasses to build their signal path before starting.
    virtual void configureForStart() {}

    oboe::Result start();

    oboe::Result pause();

    oboe::Result stopAllStreams();

    virtual oboe::Result stop() {
        return stopAllStreams();
    }

    double getCpuLoad() {
        return oboeCallbackProxy.getCpuLoad();
    }

    void setWorkload(double workload) {
        oboeCallbackProxy.setWorkload(workload);
    }

    virtual oboe::Result startPlayback() {
        return oboe::Result::OK;
    }

    virtual oboe::Result stopPlayback() {
        return oboe::Result::OK;
    }

    // Subclasses that poll instead of using a callback override this.
    virtual void runBlockingIO() {};

    static void threadCallback(ActivityContext *context) {
        context->runBlockingIO();
    }

    void stopBlockingIOThread() {
        if (dataThread != nullptr) {
            // stop a thread that runs in place of the callback
            threadEnabled.store(false); // ask thread to exit its loop
            dataThread->join();
            delete dataThread;          // was leaked before (see old FIXME)
            dataThread = nullptr;
        }
    }

    virtual double getPeakLevel(int index) {
        return 0.0;
    }

    // Current time in nanoseconds, or a negative errno-style value on failure.
    static int64_t getNanoseconds(clockid_t clockId = CLOCK_MONOTONIC) {
        struct timespec time;
        int result = clock_gettime(clockId, &time);
        if (result < 0) {
            return result;
        }
        return (time.tv_sec * NANOS_PER_SECOND) + time.tv_nsec;
    }

    // Calculate time between beginning and when frame[0] occurred.
    int32_t calculateColdStartLatencyMillis(int32_t sampleRate,
                                            int64_t beginTimeNanos,
                                            int64_t timeStampPosition,
                                            int64_t timestampNanos) const {
        int64_t elapsedNanos = NANOS_PER_SECOND * (timeStampPosition / (double) sampleRate);
        int64_t timeOfFrameZero = timestampNanos - elapsedNanos;
        int64_t coldStartLatencyNanos = timeOfFrameZero - beginTimeNanos;
        return coldStartLatencyNanos / NANOS_PER_MILLISECOND;
    }

    // Cold-start latency of the input stream in ms, or -1 if not measurable yet.
    int32_t getColdStartInputMillis() {
        std::shared_ptr<oboe::AudioStream> oboeStream = getInputStream();
        if (oboeStream != nullptr) {
            int64_t framesRead = oboeStream->getFramesRead();
            if (framesRead > 0) {
                // Base latency on the time that frame[0] would have been received by the app.
                int64_t nowNanos = getNanoseconds();
                return calculateColdStartLatencyMillis(oboeStream->getSampleRate(),
                                                       mInputOpenedAt,
                                                       framesRead,
                                                       nowNanos);
            }
        }
        return -1;
    }

    // Cold-start latency of the output stream in ms, or -1 if not measurable yet.
    int32_t getColdStartOutputMillis() {
        std::shared_ptr<oboe::AudioStream> oboeStream = getOutputStream();
        if (oboeStream != nullptr) {
            auto result = oboeStream->getTimestamp(CLOCK_MONOTONIC);
            if (result) {
                auto frameTimestamp = result.value();
                // Calculate the time that frame[0] would have been played by the speaker.
                int64_t position = frameTimestamp.position;
                int64_t timestampNanos = frameTimestamp.timestamp;
                return calculateColdStartLatencyMillis(oboeStream->getSampleRate(),
                                                       mOutputOpenedAt,
                                                       position,
                                                       timestampNanos);
            }
        }
        return -1;
    }

    /**
     * Trigger a sound or impulse.
     */
    virtual void trigger() {}

    bool isMMapUsed(int32_t streamIndex);

    int32_t getFramesPerBlock() {
        return (callbackSize == 0) ? mFramesPerBurst : callbackSize;
    }

    int64_t getCallbackCount() {
        return oboeCallbackProxy.getCallbackCount();
    }

    // Last error reported by a stream's error callback.
    // BUG FIX: the original ternary was inverted - it returned ErrorNull when a
    // stream existed and dereferenced a null pointer when none did.
    oboe::Result getLastErrorCallbackResult() {
        std::shared_ptr<oboe::AudioStream> stream = getOutputStream();
        if (stream == nullptr) {
            stream = getInputStream();
        }
        return (stream == nullptr) ? oboe::Result::ErrorNull
                                   : stream->getLastErrorCallbackResult();
    }

    int32_t getFramesPerCallback() {
        return oboeCallbackProxy.getFramesPerCallback();
    }

    virtual void setChannelEnabled(int channelIndex, bool enabled) {}

    virtual void setSignalType(int signalType) {}

    virtual int32_t saveWaveFile(const char *filename);

    virtual void setMinimumFramesBeforeRead(int32_t numFrames) {}

    static bool mUseCallback;
    static int callbackSize;

    double getTimestampLatency(int32_t streamIndex);

protected:
    std::shared_ptr<oboe::AudioStream> getInputStream();
    std::shared_ptr<oboe::AudioStream> getOutputStream();
    int32_t allocateStreamIndex();
    void freeStreamIndex(int32_t streamIndex);

    virtual void createRecording() {
        mRecording = std::make_unique<MultiChannelRecording>(mChannelCount,
                                                             SECONDS_TO_RECORD * mSampleRate);
    }

    virtual void finishOpen(bool isInput, oboe::AudioStream *oboeStream) {}

    virtual oboe::Result startStreams() = 0;

    std::unique_ptr<float []> dataBuffer{};

    AudioStreamGateway audioStreamGateway;
    OboeStreamCallbackProxy oboeCallbackProxy;

    std::unique_ptr<MultiChannelRecording> mRecording{};

    int32_t mNextStreamHandle = 0;
    std::unordered_map<int32_t, std::shared_ptr<oboe::AudioStream>> mOboeStreams;
    int32_t mFramesPerBurst = 0; // TODO per stream
    int32_t mChannelCount = 0; // TODO per stream
    int32_t mSampleRate = 0; // TODO per stream

    std::atomic<bool> threadEnabled{false};
    // Owned raw pointer; allocated elsewhere, deleted in stopBlockingIOThread().
    std::thread *dataThread = nullptr;

private:
    int64_t mInputOpenedAt = 0;
    int64_t mOutputOpenedAt = 0;
};
/**
 * Test a single input stream.
 */
class ActivityTestInput : public ActivityContext {
public:
    ActivityTestInput() {}
    virtual ~ActivityTestInput() = default;

    void configureForStart() override;

    // Peak level reported by the input analyzer for the given channel.
    double getPeakLevel(int index) override {
        return mInputAnalyzer.getPeakLevel(index);
    }

    void runBlockingIO() override;

    // Require this many frames before the polling thread reads from the stream.
    void setMinimumFramesBeforeRead(int32_t numFrames) override {
        mInputAnalyzer.setMinimumFramesBeforeRead(numFrames);
        mMinimumFramesBeforeRead = numFrames;
    }

    int32_t getMinimumFramesBeforeRead() const {
        return mMinimumFramesBeforeRead;
    }

protected:
    oboe::Result startStreams() override {
        // Look the stream up once instead of four separate getInputStream()
        // calls (each was a map lookup plus a shared_ptr copy).
        std::shared_ptr<oboe::AudioStream> inputStream = getInputStream();
        mInputAnalyzer.reset();
        mInputAnalyzer.setup(inputStream->getFramesPerBurst(),
                             inputStream->getChannelCount(),
                             inputStream->getFormat());
        return inputStream->requestStart();
    }

    InputStreamCallbackAnalyzer  mInputAnalyzer;
    int32_t mMinimumFramesBeforeRead = 0;
};
/**
 * Record a configured input stream and play it back some simple way.
 */
class ActivityRecording : public ActivityTestInput {
public:
    ActivityRecording() {}
    virtual ~ActivityRecording() = default;

    // Stop playback first, then the capture streams; report the first failure.
    oboe::Result stop() override {
        oboe::Result resultStopPlayback = stopPlayback();
        oboe::Result resultStopAudio = ActivityContext::stop();
        return (resultStopPlayback != oboe::Result::OK) ? resultStopPlayback : resultStopAudio;
    }

    oboe::Result startPlayback() override;

    oboe::Result stopPlayback() override;

    // Feeds the stored recording back into the playback stream.
    PlayRecordingCallback mPlayRecordingCallback;
    // Raw pointer owned by this activity; created in startPlayback().
    oboe::AudioStream *playbackStream = nullptr;
};
/**
 * Test a single output stream driven by a configurable signal flowgraph.
 */
class ActivityTestOutput : public ActivityContext {
public:
    ActivityTestOutput()
            : sineOscillators(MAX_SINE_OSCILLATORS)
            , sawtoothOscillators(MAX_SINE_OSCILLATORS) {}

    virtual ~ActivityTestOutput() = default;

    void close(int32_t streamIndex) override;

    oboe::Result startStreams() override {
        return getOutputStream()->start();
    }

    void configureForStart() override;

    // Attach the flowgraph output to the stream's callback path.
    virtual void configureStreamGateway();

    void runBlockingIO() override;

    void setChannelEnabled(int channelIndex, bool enabled) override;

    // WARNING - must match order in strings.xml and OboeAudioOutputStream.java
    enum SignalType {
        Sine = 0,
        Sawtooth = 1,
        FreqSweep = 2,
        PitchSweep = 3,
        WhiteNoise = 4
    };

    void setSignalType(int signalType) override {
        mSignalType = (SignalType) signalType;
    }

protected:
    SignalType mSignalType = SignalType::Sine;

    std::vector<SineOscillator> sineOscillators;
    std::vector<SawtoothOscillator> sawtoothOscillators;

    static constexpr float kSweepPeriod = 10.0; // for triangle up and down

    // A triangle LFO is shaped into either a linear or an exponential range.
    TriangleOscillator mTriangleOscillator;
    LinearShape mLinearShape;
    ExponentialShape mExponentialShape;

    std::unique_ptr<ManyToMultiConverter> manyToMulti;
    std::unique_ptr<MonoToMultiConverter> monoToMulti;
    // One sink per possible stream sample format.
    std::shared_ptr<oboe::flowgraph::SinkFloat> mSinkFloat;
    std::shared_ptr<oboe::flowgraph::SinkI16> mSinkI16;
    std::shared_ptr<oboe::flowgraph::SinkI24> mSinkI24;
    std::shared_ptr<oboe::flowgraph::SinkI32> mSinkI32;
};
/**
 * Generate a short beep with a very short attack.
 * This is used by Java to measure output latency.
 */
class ActivityTapToTone : public ActivityTestOutput {
public:
    ActivityTapToTone() {}
    virtual ~ActivityTapToTone() = default;

    void configureForStart() override;

    // Fire one saw-ping; called from the UI tap.
    virtual void trigger() override {
        sawPingGenerator.trigger();
    }

    SawPingGenerator sawPingGenerator;
};
/**
 * Activity that uses synchronized input/output streams.
 */
class ActivityFullDuplex : public ActivityContext {
public:
    void configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) override;

    // Default results mean "not supported"; subclasses override as needed.
    virtual int32_t getState() { return -1; }
    virtual int32_t getResult() { return -1; }
    virtual bool isAnalyzerDone() { return false; }

    void setMinimumFramesBeforeRead(int32_t numFrames) override {
        getFullDuplexAnalyzer()->setMinimumFramesBeforeRead(numFrames);
    }

    // Each concrete full-duplex activity supplies its own analyzer.
    virtual FullDuplexAnalyzer *getFullDuplexAnalyzer() = 0;

    int32_t getResetCount() {
        return getFullDuplexAnalyzer()->getLoopbackProcessor()->getResetCount();
    }

protected:
    // Two channels: one for output, one for input.
    void createRecording() override {
        mRecording = std::make_unique<MultiChannelRecording>(2, // output and input
                                                             SECONDS_TO_RECORD * mSampleRate);
    }
};
/**
 * Echo input to output through a delay line.
 */
class ActivityEcho : public ActivityFullDuplex {
public:
    oboe::Result startStreams() override {
        return mFullDuplexEcho->start();
    }

    void configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) override;

    // No-op until configureBuilder() has created the processor.
    void setDelayTime(double delayTimeSeconds) {
        if (mFullDuplexEcho) {
            mFullDuplexEcho->setDelayTime(delayTimeSeconds);
        }
    }

    FullDuplexAnalyzer *getFullDuplexAnalyzer() override {
        return (FullDuplexAnalyzer *) mFullDuplexEcho.get();
    }

protected:
    void finishOpen(bool isInput, oboe::AudioStream *oboeStream) override;

private:
    std::unique_ptr<FullDuplexEcho> mFullDuplexEcho{};
};
/**
 * Measure Round Trip Latency
 */
class ActivityRoundTripLatency : public ActivityFullDuplex {
public:
    oboe::Result startStreams() override {
        mAnalyzerLaunched = false;
        return mFullDuplexLatency->start();
    }

    void configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) override;

    LatencyAnalyzer *getLatencyAnalyzer() {
        return &mEchoAnalyzer;
    }

    int32_t getState() override {
        return getLatencyAnalyzer()->getState();
    }

    int32_t getResult() override {
        return getLatencyAnalyzer()->getState(); // TODO This does not look right.
    }

    // Polled from Java; also kicks off the background analysis once enough
    // data has been captured.
    bool isAnalyzerDone() override {
        if (!mAnalyzerLaunched) {
            mAnalyzerLaunched = launchAnalysisIfReady();
        }
        return mEchoAnalyzer.isDone();
    }

    FullDuplexAnalyzer *getFullDuplexAnalyzer() override {
        return (FullDuplexAnalyzer *) mFullDuplexLatency.get();
    }

    // Thread entry point for the number crunching.
    static void analyzeData(PulseLatencyAnalyzer *analyzer) {
        analyzer->analyze();
    }

    bool launchAnalysisIfReady() {
        // Are we ready to do the analysis?
        if (mEchoAnalyzer.hasEnoughData()) {
            // Crunch the numbers on a separate thread.
            // NOTE(review): the detached thread uses mEchoAnalyzer - verify
            // this activity outlives the analysis.
            std::thread t(analyzeData, &mEchoAnalyzer);
            t.detach();
            return true;
        }
        return false;
    }

protected:
    void finishOpen(bool isInput, oboe::AudioStream *oboeStream) override;

private:
    std::unique_ptr<FullDuplexAnalyzer> mFullDuplexLatency{};
    PulseLatencyAnalyzer  mEchoAnalyzer;
    bool                  mAnalyzerLaunched = false;
};
/**
 * Measure Glitches
 */
class ActivityGlitches : public ActivityFullDuplex {
public:
    oboe::Result startStreams() override {
        return mFullDuplexGlitches->start();
    }

    void configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) override;

    GlitchAnalyzer *getGlitchAnalyzer() {
        return &mGlitchAnalyzer;
    }

    int32_t getState() override {
        return getGlitchAnalyzer()->getState();
    }

    int32_t getResult() override {
        return getGlitchAnalyzer()->getResult();
    }

    bool isAnalyzerDone() override {
        return mGlitchAnalyzer.isDone();
    }

    FullDuplexAnalyzer *getFullDuplexAnalyzer() override {
        return (FullDuplexAnalyzer *) mFullDuplexGlitches.get();
    }

protected:
    void finishOpen(bool isInput, oboe::AudioStream *oboeStream) override;

private:
    std::unique_ptr<FullDuplexAnalyzer> mFullDuplexGlitches{};
    GlitchAnalyzer  mGlitchAnalyzer;
};
/**
 * Measure Data Path
 */
class ActivityDataPath : public ActivityFullDuplex {
public:
    oboe::Result startStreams() override {
        return mFullDuplexDataPath->start();
    }

    void configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) override;

    void configureForStart() override {
        std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();

        int32_t capacityInFrames = outputStream->getBufferCapacityInFrames();
        int32_t burstInFrames = outputStream->getFramesPerBurst();
        int32_t capacityInBursts = capacityInFrames / burstInFrames;
        int32_t sizeInBursts = std::max(2, capacityInBursts / 2);
        // Set size of buffer to minimize underruns.
        auto result = outputStream->setBufferSizeInFrames(sizeInBursts * burstInFrames);
        // BUG FIX: only read result.value() when the call succeeded; the old
        // code logged it unconditionally (and cast result to void although it
        // was actually used).
        LOGD("ActivityDataPath: %s() capacity = %d, burst = %d, size = %d",
             __func__, capacityInFrames, burstInFrames,
             result ? result.value() : -1);
    }

    DataPathAnalyzer *getDataPathAnalyzer() {
        return &mDataPathAnalyzer;
    }

    FullDuplexAnalyzer *getFullDuplexAnalyzer() override {
        return (FullDuplexAnalyzer *) mFullDuplexDataPath.get();
    }

protected:
    void finishOpen(bool isInput, oboe::AudioStream *oboeStream) override;

private:
    std::unique_ptr<FullDuplexAnalyzer> mFullDuplexDataPath{};
    DataPathAnalyzer  mDataPathAnalyzer;
};
/**
 * Test stream disconnection on a single output (or input) stream.
 */
class ActivityTestDisconnect : public ActivityContext {
public:
    ActivityTestDisconnect() {}
    virtual ~ActivityTestDisconnect() = default;

    void close(int32_t streamIndex) override;

    // Start whichever stream was opened; output takes priority.
    oboe::Result startStreams() override {
        std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
        if (outputStream) {
            return outputStream->start();
        }

        std::shared_ptr<oboe::AudioStream> inputStream = getInputStream();
        if (inputStream) {
            return inputStream->start();
        }
        return oboe::Result::ErrorNull;
    }

    void configureForStart() override;

private:
    std::unique_ptr<SineOscillator> sineOscillator;
    std::unique_ptr<MonoToMultiConverter> monoToMulti;
    std::shared_ptr<oboe::flowgraph::SinkFloat> mSinkFloat;
};
/**
 * Switch between the various test activities; owns one instance of each
 * and exposes the currently selected one to the JNI layer.
 */
class NativeAudioContext {
public:

    ActivityContext *getCurrentActivity() {
        return currentActivity;
    };

    // Select the activity that matches the Java-side test type.
    void setActivityType(int activityType) {
        mActivityType = (ActivityType) activityType;
        switch(mActivityType) {
            default:
            case ActivityType::Undefined:
            case ActivityType::TestOutput:
                currentActivity = &mActivityTestOutput;
                break;
            case ActivityType::TestInput:
                currentActivity = &mActivityTestInput;
                break;
            case ActivityType::TapToTone:
                currentActivity = &mActivityTapToTone;
                break;
            case ActivityType::RecordPlay:
                currentActivity = &mActivityRecording;
                break;
            case ActivityType::Echo:
                currentActivity = &mActivityEcho;
                break;
            case ActivityType::RoundTripLatency:
                currentActivity = &mActivityRoundTripLatency;
                break;
            case ActivityType::Glitches:
                currentActivity = &mActivityGlitches;
                break;
            case ActivityType::TestDisconnect:
                currentActivity = &mActivityTestDisconnect;
                break;
            case ActivityType::DataPath:
                currentActivity = &mActivityDataPath;
                break;
        }
    }

    // NOTE: parameter name says millis but it is forwarded unchanged to
    // ActivityEcho::setDelayTime(seconds) - verify units with callers.
    void setDelayTime(double delayTimeMillis) {
        mActivityEcho.setDelayTime(delayTimeMillis);
    }

    ActivityTestOutput           mActivityTestOutput;
    ActivityTestInput            mActivityTestInput;
    ActivityTapToTone            mActivityTapToTone;
    ActivityRecording            mActivityRecording;
    ActivityEcho                 mActivityEcho;
    ActivityRoundTripLatency     mActivityRoundTripLatency;
    ActivityGlitches             mActivityGlitches;
    ActivityDataPath             mActivityDataPath;
    ActivityTestDisconnect       mActivityTestDisconnect;

private:

    // WARNING - must match definitions in TestAudioActivity.java
    enum ActivityType {
        Undefined = -1,
        TestOutput = 0,
        TestInput = 1,
        TapToTone = 2,
        RecordPlay = 3,
        Echo = 4,
        RoundTripLatency = 5,
        Glitches = 6,
        TestDisconnect = 7,
        DataPath = 8,
    };

    ActivityType mActivityType = ActivityType::Undefined;
    ActivityContext *currentActivity = &mActivityTestOutput;
};
#endif //NATIVEOBOE_NATIVEAUDIOCONTEXT_H

View File

@ -0,0 +1,81 @@
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include "OboeStreamCallbackProxy.h"
// Linear congruential random number generator with 16-bit state.
static uint32_t s_random16() {
    static uint32_t seed = 1234;
    const uint32_t next = (seed * 31421u) + 6927u;
    seed = next & 0x0FFFFu;
    return seed;
}
/**
 * The random number generator is good for burning CPU because the compiler cannot
 * easily optimize away the computation.
 * @param workload number of times to execute the loop
 * @return a white noise value between -1.0 and +1.0
 */
static float s_burnCPU(int32_t workload) {
    uint32_t random = 0;
    for (int32_t i = 0; i < workload; i++) {
        for (int32_t j = 0; j < 10; j++) {
            random = random ^ s_random16();
        }
    }
    // BUG FIX: cast to signed before subtracting. The old unsigned
    // (random - 32768) wrapped around for random < 32768 and produced values
    // far outside the documented -1.0..+1.0 range.
    return ((int32_t) random - 32768) * (1.0f / 32768);
}
// Static flag that forces every proxied callback to return Stop.
bool OboeStreamCallbackProxy::mCallbackReturnStop = false;

// Current time on the given clock in nanoseconds, or a negative
// errno-style value if clock_gettime() fails.
int64_t OboeStreamCallbackProxy::getNanoseconds(clockid_t clockId) {
    struct timespec time;
    int result = clock_gettime(clockId, &time);
    if (result < 0) {
        return result;
    }
    // BUG FIX: use integer arithmetic. The old (tv_sec * 1e9) computed in
    // double and loses nanosecond precision once the value exceeds 2^53.
    static constexpr int64_t kNanosPerSecond = 1000000000LL;
    return (time.tv_sec * kNanosPerSecond) + time.tv_nsec;
}
// Forward the audio callback to the wrapped callback while tracking call
// count, frames per callback and an estimate of the CPU load.
oboe::DataCallbackResult OboeStreamCallbackProxy::onAudioReady(
        oboe::AudioStream *audioStream,
        void *audioData,
        int numFrames) {
    const int64_t beginNanos = getNanoseconds();
    mCallbackCount++;
    mFramesPerCallback = numFrames;

    if (mCallbackReturnStop) {
        return oboe::DataCallbackResult::Stop;
    }

    // Waste cycles in proportion to the requested artificial workload.
    s_burnCPU((int32_t)(mWorkload * kWorkloadScaler * numFrames));

    oboe::DataCallbackResult downstreamResult = oboe::DataCallbackResult::Stop;
    if (mCallback != nullptr) {
        downstreamResult = mCallback->onAudioReady(audioStream, audioData, numFrames);
    }

    // Update CPU load estimate.
    double calculationTime = (double)(getNanoseconds() - beginNanos);
    double inverseRealTime = audioStream->getSampleRate() / (1.0e9 * numFrames);
    double currentCpuLoad = calculationTime * inverseRealTime; // avoid a divide
    mCpuLoad = (mCpuLoad * 0.95) + (currentCpuLoad * 0.05); // simple low pass filter

    return downstreamResult;
}

View File

@ -0,0 +1,88 @@
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_OBOESTREAMCALLBACKPROXY_H
#define NATIVEOBOE_OBOESTREAMCALLBACKPROXY_H
#include <unistd.h>
#include <sys/types.h>
#include "oboe/Oboe.h"
/**
 * Wraps another AudioStreamCallback and adds instrumentation:
 * callback counting, frames-per-callback tracking, artificial CPU
 * workload and a low-pass-filtered CPU load estimate.
 */
class OboeStreamCallbackProxy : public oboe::AudioStreamCallback {
public:
    void setCallback(oboe::AudioStreamCallback *callback) {
        mCallback = callback;
        setCallbackCount(0);
    }

    // When true, every subsequent callback returns Stop.
    static void setCallbackReturnStop(bool b) {
        mCallbackReturnStop = b;
    }

    int64_t getCallbackCount() {
        return mCallbackCount;
    }

    void setCallbackCount(int64_t count) {
        mCallbackCount = count;
    }

    int32_t getFramesPerCallback() {
        return mFramesPerCallback.load();
    }

    /**
     * Called when the stream is ready to process audio.
     */
    oboe::DataCallbackResult onAudioReady(
            oboe::AudioStream *audioStream,
            void *audioData,
            int numFrames) override;

    /**
     * Specify the amount of artificial workload that will waste CPU cycles
     * and increase the CPU load.
     * @param workload typically ranges from 0.0 to 100.0
     */
    void setWorkload(double workload) {
        mWorkload = std::max(0.0, workload);
    }

    double getWorkload() const {
        return mWorkload;
    }

    double getCpuLoad() const {
        return mCpuLoad;
    }

    static int64_t getNanoseconds(clockid_t clockId = CLOCK_MONOTONIC);

private:
    static constexpr int32_t   kWorkloadScaler = 500;
    double                     mWorkload = 0.0;
    std::atomic<double>        mCpuLoad{0};

    oboe::AudioStreamCallback *mCallback = nullptr;
    static bool                mCallbackReturnStop;
    // NOTE(review): written on the audio thread, read elsewhere without
    // synchronization - confirm whether this needs to be atomic.
    int64_t                    mCallbackCount = 0;
    std::atomic<int32_t>       mFramesPerCallback{0};
};
#endif //NATIVEOBOE_OBOESTREAMCALLBACKPROXY_H

View File

@ -0,0 +1,40 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "AudioStreamGateway.h"
#include "oboe/Oboe.h"
#include "common/OboeDebug.h"
#include <sched.h>
#include <cstring>
#include "OboeTesterStreamCallback.h"
// Print if scheduler changes.
// Log the CPU scheduler of the calling thread, but only when it changes.
void OboeTesterStreamCallback::printScheduler() {
#if OBOE_ENABLE_LOGGING
    const int scheduler = sched_getscheduler(gettid());
    if (scheduler == mPreviousScheduler) {
        return; // unchanged since the last callback, nothing to report
    }
    const int schedulerType = scheduler & 0xFFFF; // mask off high flags
    const char *name = "UNKNOWN";
    switch (schedulerType) {
        case SCHED_FIFO:  name = "SCHED_FIFO";  break;
        case SCHED_OTHER: name = "SCHED_OTHER"; break;
        case SCHED_RR:    name = "SCHED_RR";    break;
        default:          break;
    }
    LOGD("callback CPU scheduler = 0x%08x = %s", scheduler, name);
    mPreviousScheduler = scheduler;
#endif
}

View File

@ -0,0 +1,41 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_STREAM_CALLBACK_H
#define OBOETESTER_STREAM_CALLBACK_H
#include <unistd.h>
#include <sys/types.h>
#include "flowgraph/FlowGraphNode.h"
#include "oboe/Oboe.h"
/**
 * Base callback for the tester that can log changes to the
 * thread's CPU scheduler.
 */
class OboeTesterStreamCallback : public oboe::AudioStreamCallback {
public:
    virtual ~OboeTesterStreamCallback() = default;

    // Call this before starting.
    void reset() {
        // Force the next printScheduler() call to log once.
        mPreviousScheduler = -1;
    }

protected:
    void    printScheduler();

    // Last scheduler value logged; -1 means "not yet observed".
    int     mPreviousScheduler = -1;
};
#endif //OBOETESTER_STREAM_CALLBACK_H

View File

@ -0,0 +1,33 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "PlayRecordingCallback.h"
/**
 * Called when the stream is ready to process audio.
 * Fills the buffer from the stored recording and stops when it runs dry.
 */
oboe::DataCallbackResult PlayRecordingCallback::onAudioReady(
        oboe::AudioStream *audioStream,
        void *audioData,
        int numFrames) {
    float *destination = (float *) audioData;
    // Read stored data into the buffer provided.
    const int32_t framesRead = mRecording->read(destination, numFrames);
    // LOGI("%s() framesRead = %d, numFrames = %d", __func__, framesRead, numFrames);
    if (framesRead > 0) {
        return oboe::DataCallbackResult::Continue;
    }
    return oboe::DataCallbackResult::Stop;
}

View File

@ -0,0 +1,46 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_PLAY_RECORDING_CALLBACK_H
#define NATIVEOBOE_PLAY_RECORDING_CALLBACK_H
#include "oboe/Oboe.h"
#include "MultiChannelRecording.h"
/**
 * AudioStreamCallback that plays back a previously captured
 * MultiChannelRecording through an output stream.
 */
class PlayRecordingCallback : public oboe::AudioStreamCallback {
public:
    PlayRecordingCallback() {}
    ~PlayRecordingCallback() = default;

    /**
     * Set the recording to be played. The recording is borrowed, not
     * owned; it must outlive this callback.
     */
    void setRecording(MultiChannelRecording *recording) {
        mRecording = recording;
    }

    /**
     * Called when the stream is ready to process audio.
     * Fills audioData from the recording; returns Stop when exhausted.
     */
    oboe::DataCallbackResult onAudioReady(
            oboe::AudioStream *audioStream,
            void *audioData,
            int numFrames) override; // 'override' guards against base-signature drift

private:
    MultiChannelRecording *mRecording = nullptr; // non-owning
};
#endif //NATIVEOBOE_PLAY_RECORDING_CALLBACK_H

View File

@ -0,0 +1,69 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <unistd.h>
#include "common/OboeDebug.h"
#include "oboe/Definitions.h"
#include "SawPingGenerator.h"
using namespace oboe::flowgraph;
// Generates a decaying sawtooth "ping" on demand (see trigger()).
// The counters are atomics because trigger() is called from a control
// thread while onProcess() runs on the audio thread.
SawPingGenerator::SawPingGenerator()
        : OscillatorBase()
        , mRequestCount(0)
        , mAcknowledgeCount(0)
        , mLevel(0.0f) {
}

SawPingGenerator::~SawPingGenerator() { }

// Clear any pending trigger requests along with the base node state.
void SawPingGenerator::reset() {
    FlowGraphNode::reset();
    mAcknowledgeCount.store(mRequestCount.load());
}
/**
 * Render numFrames of output: a decaying sawtooth burst after each
 * trigger() request, or silence once the envelope has died away.
 */
int32_t SawPingGenerator::onProcess(int numFrames) {
    const float *freqIn = frequency.getBuffer();
    const float *ampIn = amplitude.getBuffer();
    float *out = output.getBuffer();

    // A pending trigger restarts the ping at full level.
    if (mRequestCount.load() > mAcknowledgeCount.load()) {
        mPhase = -1.0f;
        mLevel = 1.0;
        mAcknowledgeCount++;
    }

    // Below this level the ping is inaudible; emit silence and skip
    // the envelope math to prevent numeric underflow.
    const bool audible = mLevel > 0.000001;
    for (int i = 0; i < numFrames; i++) {
        if (audible) {
            float saw = incrementPhase(freqIn[i]);
            out[i] = (float) (saw * mLevel * ampIn[i]);
            mLevel *= 0.999; // exponential decay per frame
        } else {
            out[i] = 0.0f;
        }
    }
    return numFrames;
}
// Request a new ping; may be called from a non-audio thread.
// The audio thread picks the request up in onProcess().
void SawPingGenerator::trigger() {
    mRequestCount++;
}

View File

@ -0,0 +1,46 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_SAWPINGGENERATOR_H
#define NATIVEOBOE_SAWPINGGENERATOR_H
#include <atomic>
#include <unistd.h>
#include <sys/types.h>
#include "flowgraph/FlowGraphNode.h"
#include "flowunits/OscillatorBase.h"
/**
 * Oscillator node that produces a short, exponentially decaying
 * sawtooth "ping" each time trigger() is called.
 */
class SawPingGenerator : public OscillatorBase {
public:
    SawPingGenerator();
    virtual ~SawPingGenerator();

    // Render numFrames of the ping (or silence) into the output port.
    int32_t onProcess(int numFrames) override;

    // Request a ping; safe to call from a non-audio thread.
    void trigger();

    void reset() override;

private:
    std::atomic<int> mRequestCount; // external thread increments this to request a beep
    std::atomic<int> mAcknowledgeCount; // audio thread sets this to acknowledge
    double mLevel; // current envelope level, decays toward zero
};
#endif //NATIVEOBOE_SAWPINGGENERATOR_H

View File

@ -0,0 +1,203 @@
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_BASE_SINE_ANALYZER_H
#define ANALYZER_BASE_SINE_ANALYZER_H
#include <algorithm>
#include <cctype>
#include <iomanip>
#include <iostream>
#include "InfiniteRecording.h"
#include "LatencyAnalyzer.h"
/**
* Output a steady sine wave and analyze the return signal.
*
* Use a cosine transform to measure the predicted magnitude and relative phase of the
* looped back sine wave. Then generate a predicted signal and compare with the actual signal.
*/
class BaseSineAnalyzer : public LoopbackProcessor {
public:

    BaseSineAnalyzer()
            : LoopbackProcessor()
            , mInfiniteRecording(64 * 1024) {}

    // Subclasses may suppress the output tone (e.g. while idle).
    virtual bool isOutputEnabled() { return true; }

    // Also refreshes mScaledTolerance, which scales with magnitude.
    void setMagnitude(double magnitude) {
        mMagnitude = magnitude;
        mScaledTolerance = mMagnitude * mTolerance;
    }

    // Phase of the measured signal relative to the reference sine, radians.
    double getPhaseOffset() {
        return mPhaseOffset;
    }

    double getMagnitude() const {
        return mMagnitude;
    }

    void setInputChannel(int inputChannel) {
        mInputChannel = inputChannel;
    }

    int getInputChannel() const {
        return mInputChannel;
    }

    void setOutputChannel(int outputChannel) {
        mOutputChannel = outputChannel;
    }

    int getOutputChannel() const {
        return mOutputChannel;
    }

    // Amplitude of white noise mixed into the output tone (see processOutputFrame).
    void setNoiseAmplitude(double noiseAmplitude) {
        mNoiseAmplitude = noiseAmplitude;
    }

    double getNoiseAmplitude() const {
        return mNoiseAmplitude;
    }

    double getTolerance() {
        return mTolerance;
    }

    void setTolerance(double tolerance) {
        mTolerance = tolerance;
    }

    // advance and wrap phase into (-PI, PI]
    void incrementOutputPhase() {
        mOutputPhase += mPhaseIncrement;
        if (mOutputPhase > M_PI) {
            mOutputPhase -= (2.0 * M_PI);
        }
    }

    /**
     * @param frameData upon return, contains the reference sine wave
     * @param channelCount
     */
    result_code processOutputFrame(float *frameData, int channelCount) override {
        float output = 0.0f;
        // Output sine wave so we can measure it.
        if (isOutputEnabled()) {
            float sinOut = sinf(mOutputPhase);
            incrementOutputPhase();
            output = (sinOut * mOutputAmplitude)
                     + (mWhiteNoise.nextRandomDouble() * mNoiseAmplitude);
            // ALOGD("sin(%f) = %f, %f\n", mOutputPhase, sinOut,  mPhaseIncrement);
        }
        // Write the tone only to the selected output channel; others get silence.
        for (int i = 0; i < channelCount; i++) {
            frameData[i] = (i == mOutputChannel) ? output : 0.0f;
        }
        return RESULT_OK;
    }

    /**
     * Calculate the magnitude of the component of the input signal
     * that matches the analysis frequency.
     * Also calculate the phase that we can use to create a
     * signal that matches that component.
     * The phase will be between -PI and +PI.
     */
    double calculateMagnitudePhase(double *phasePtr = nullptr) {
        if (mFramesAccumulated == 0) {
            return 0.0;
        }
        double sinMean = mSinAccumulator / mFramesAccumulated;
        double cosMean = mCosAccumulator / mFramesAccumulated;
        double magnitude = 2.0 * sqrt((sinMean * sinMean) + (cosMean * cosMean));
        if (phasePtr != nullptr) {
            double phase = M_PI_2 - atan2(sinMean, cosMean);
            *phasePtr = phase;
        }
        return magnitude;
    }

    /**
     * Accumulate one sample against the reference phase.
     * @return true each time a full sine period has been accumulated
     *         and the magnitude/phase estimate has been refreshed.
     */
    bool transformSample(float sample, float referencePhase) {
        // Track incoming signal and slowly adjust magnitude to account
        // for drift in the DRC or AGC.
        mSinAccumulator += sample * sinf(referencePhase);
        mCosAccumulator += sample * cosf(referencePhase);
        mFramesAccumulated++;
        // Must be a multiple of the period or the calculation will not be accurate.
        if (mFramesAccumulated == mSinePeriod) {
            const double coefficient = 0.1;
            double magnitude = calculateMagnitudePhase(&mPhaseOffset);
            // One pole averaging filter.
            setMagnitude((mMagnitude * (1.0 - coefficient)) + (magnitude * coefficient));
            return true;
        } else {
            return false;
        }
    }

    // reset the sine wave detector
    virtual void resetAccumulator() {
        mFramesAccumulated = 0;
        mSinAccumulator = 0.0;
        mCosAccumulator = 0.0;
    }

    void reset() override {
        LoopbackProcessor::reset();
        resetAccumulator();
    }

    void prepareToTest() override {
        LoopbackProcessor::prepareToTest();
        // Derive analysis period from the stream sample rate.
        mSinePeriod = getSampleRate() / kTargetGlitchFrequency;
        mOutputPhase = 0.0f;
        mInverseSinePeriod = 1.0 / mSinePeriod;
        mPhaseIncrement = 2.0 * M_PI * mInverseSinePeriod;
    }

protected:
    // Frequency of the generated/analyzed tone, in Hz.
    static constexpr int32_t kTargetGlitchFrequency = 1000;

    int32_t mSinePeriod = 1; // this will be set before use
    double  mInverseSinePeriod = 1.0;
    double  mPhaseIncrement = 0.0;
    double  mOutputPhase = 0.0;
    double  mOutputAmplitude = 0.75;
    // If this jumps around then we are probably just hearing noise.
    double  mPhaseOffset = 0.0;
    double  mMagnitude = 0.0;
    int32_t mFramesAccumulated = 0;
    double  mSinAccumulator = 0.0;
    double  mCosAccumulator = 0.0;
    double  mScaledTolerance = 0.0; // mMagnitude * mTolerance, kept fresh by setMagnitude()
    InfiniteRecording<float> mInfiniteRecording;

private:
    int32_t mInputChannel = 0;
    int32_t mOutputChannel = 0;
    float   mTolerance = 0.10; // scaled from 0.0 to 1.0
    float   mNoiseAmplitude = 0.00; // Used to experiment with warbling caused by DRC.
    PseudoRandom mWhiteNoise;
};
#endif //ANALYZER_BASE_SINE_ANALYZER_H

View File

@ -0,0 +1,94 @@
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_DATA_PATH_ANALYZER_H
#define ANALYZER_DATA_PATH_ANALYZER_H
#include <algorithm>
#include <cctype>
#include <iomanip>
#include <iostream>
#include <math.h>
#include "BaseSineAnalyzer.h"
#include "InfiniteRecording.h"
#include "LatencyAnalyzer.h"
/**
* Output a steady sine wave and analyze the return signal.
*
* Use a cosine transform to measure the predicted magnitude and relative phase of the
* looped back sine wave.
*/
/**
 * Output a steady sine wave and analyze the return signal to verify
 * that the audio data path is working. Tracks the maximum magnitude
 * observed while the measured phase is stable.
 */
class DataPathAnalyzer : public BaseSineAnalyzer {
public:

    DataPathAnalyzer() : BaseSineAnalyzer() {
        // Add a little bit of noise to reduce blockage by speaker protection and DRC.
        setNoiseAmplitude(0.05);
    }

    /**
     * @param frameData contains microphone data with sine signal feedback
     * @param channelCount
     */
    result_code processInputFrame(const float *frameData, int /* channelCount */) override {
        result_code result = RESULT_OK;
        float sample = frameData[getInputChannel()];
        mInfiniteRecording.write(sample);
        if (transformSample(sample, mOutputPhase)) {
            resetAccumulator();
        }
        // Update MaxMagnitude only while the phase estimate is stable ("locked").
        // Use fabs() so the fractional phase difference is preserved;
        // plain abs() can resolve to the integer overload and truncate it.
        double diff = fabs(mPhaseOffset - mPreviousPhaseOffset);
        if (diff < mPhaseTolerance) {
            mMaxMagnitude = std::max(mMagnitude, mMaxMagnitude);
        }
        mPreviousPhaseOffset = mPhaseOffset;
        return result;
    }

    // Multi-line, human readable summary of the measurement.
    std::string analyze() override {
        std::stringstream report;
        report << "DataPathAnalyzer ------------------\n";
        report << LOOPBACK_RESULT_TAG "sine.magnitude     = " << std::setw(8)
               << mMagnitude << "\n";
        report << LOOPBACK_RESULT_TAG "frames.accumulated = " << std::setw(8)
               << mFramesAccumulated << "\n";
        report << LOOPBACK_RESULT_TAG "sine.period        = " << std::setw(8)
               << mSinePeriod << "\n";
        return report.str();
    }

    void reset() override {
        BaseSineAnalyzer::reset();
        mPreviousPhaseOffset = 999.0; // Arbitrary high offset to prevent early lock.
        mMaxMagnitude = 0.0;
    }

    double getMaxMagnitude() {
        return mMaxMagnitude;
    }

private:
    double  mPreviousPhaseOffset = 0.0;
    double  mPhaseTolerance = 2 * M_PI / 48; // max phase jitter considered "locked"
    double  mMaxMagnitude = 0.0;
};
#endif // ANALYZER_DATA_PATH_ANALYZER_H

View File

@ -0,0 +1,363 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_GLITCH_ANALYZER_H
#define ANALYZER_GLITCH_ANALYZER_H
#include <algorithm>
#include <cctype>
#include <iomanip>
#include <iostream>
#include "InfiniteRecording.h"
#include "LatencyAnalyzer.h"
#include "BaseSineAnalyzer.h"
#include "PseudoRandom.h"
/**
* Output a steady sine wave and analyze the return signal.
*
* Use a cosine transform to measure the predicted magnitude and relative phase of the
* looped back sine wave. Then generate a predicted signal and compare with the actual signal.
*/
class GlitchAnalyzer : public BaseSineAnalyzer {
public:

    GlitchAnalyzer() : BaseSineAnalyzer() {}

    int32_t getState() const {
        return mState;
    }

    double getPeakAmplitude() const {
        return mPeakFollower.getLevel();
    }

    int32_t getGlitchCount() const {
        return mGlitchCount;
    }

    // Frames spent in the given state; counters are zeroed in prepareToTest().
    int32_t getStateFrameCount(int state) const {
        return mStateFrameCounters[state];
    }

    // Signal/noise power ratio in dB; flags ERROR_VOLUME_TOO_LOW below MIN_SNR_DB.
    double getSignalToNoiseDB() {
        static const double threshold = 1.0e-14;
        if (mMeanSquareSignal < threshold || mMeanSquareNoise < threshold) {
            return 0.0;
        } else {
            double signalToNoise = mMeanSquareSignal / mMeanSquareNoise; // power ratio
            // NOTE(review): log() is the natural log; dB conversion normally
            // uses log10() — confirm this scaling is intended.
            double signalToNoiseDB = 10.0 * log(signalToNoise);
            if (signalToNoiseDB < MIN_SNR_DB) {
                ALOGD("ERROR - signal to noise ratio is too low! < %d dB. Adjust volume.",
                      MIN_SNR_DB);
                setResult(ERROR_VOLUME_TOO_LOW);
            }
            return signalToNoiseDB;
        }
    }

    // Build the final multi-line report and set the overall result code.
    std::string analyze() override {
        std::stringstream report;
        report << "GlitchAnalyzer ------------------\n";
        report << LOOPBACK_RESULT_TAG "peak.amplitude     = " << std::setw(8)
               << getPeakAmplitude() << "\n";
        report << LOOPBACK_RESULT_TAG "sine.magnitude     = " << std::setw(8)
               << mMagnitude << "\n";
        report << LOOPBACK_RESULT_TAG "rms.noise          = " << std::setw(8)
               << mMeanSquareNoise << "\n";
        report << LOOPBACK_RESULT_TAG "signal.to.noise.db = " << std::setw(8)
               << getSignalToNoiseDB() << "\n";
        report << LOOPBACK_RESULT_TAG "frames.accumulated = " << std::setw(8)
               << mFramesAccumulated << "\n";
        report << LOOPBACK_RESULT_TAG "sine.period        = " << std::setw(8)
               << mSinePeriod << "\n";
        report << LOOPBACK_RESULT_TAG "test.state         = " << std::setw(8)
               << mState << "\n";
        report << LOOPBACK_RESULT_TAG "frame.count        = " << std::setw(8)
               << mFrameCounter << "\n";
        // Did we ever get a lock?
        bool gotLock = (mState == STATE_LOCKED) || (mGlitchCount > 0);
        if (!gotLock) {
            report << "ERROR - failed to lock on reference sine tone.\n";
            setResult(ERROR_NO_LOCK);
        } else {
            // Only print if meaningful.
            report << LOOPBACK_RESULT_TAG "glitch.count       = " << std::setw(8)
                   << mGlitchCount << "\n";
            report << LOOPBACK_RESULT_TAG "max.glitch         = " << std::setw(8)
                   << mMaxGlitchDelta << "\n";
            if (mGlitchCount > 0) {
                report << "ERROR - number of glitches > 0\n";
                setResult(ERROR_GLITCHES);
            }
        }
        return report.str();
    }

    void printStatus() override {
        ALOGD("st = %d, #gl = %3d,", mState, mGlitchCount);
    }

    /**
     * State machine: wait for hardware to settle, detect the looped-back
     * signal, lock onto its phase, then compare each incoming sample with
     * a predicted sine value; a large deviation counts as a glitch.
     *
     * @param frameData contains microphone data with sine signal feedback
     * @param channelCount
     */
    result_code processInputFrame(const float *frameData, int /* channelCount */) override {
        result_code result = RESULT_OK;

        float sample = frameData[0];
        float peak = mPeakFollower.process(sample);
        mInfiniteRecording.write(sample);

        // Force a periodic glitch to test the detector!
        if (mForceGlitchDuration > 0) {
            if (mForceGlitchCounter == 0) {
                ALOGE("%s: force a glitch!!", __func__);
                mForceGlitchCounter = getSampleRate();
            } else if (mForceGlitchCounter <= mForceGlitchDuration) {
                // Force an abrupt offset.
                sample += (sample > 0.0) ? -0.5f : 0.5f;
            }
            --mForceGlitchCounter;
        }

        mStateFrameCounters[mState]++; // count how many frames we are in each state

        switch (mState) {
            case STATE_IDLE:
                mDownCounter--;
                if (mDownCounter <= 0) {
                    mState = STATE_IMMUNE;
                    mDownCounter = IMMUNE_FRAME_COUNT;
                    mInputPhase = 0.0; // prevent spike at start
                    mOutputPhase = 0.0;
                }
                break;

            case STATE_IMMUNE:
                mDownCounter--;
                if (mDownCounter <= 0) {
                    mState = STATE_WAITING_FOR_SIGNAL;
                }
                break;

            case STATE_WAITING_FOR_SIGNAL:
                if (peak > mThreshold) {
                    mState = STATE_WAITING_FOR_LOCK;
                    //ALOGD("%5d: switch to STATE_WAITING_FOR_LOCK", mFrameCounter);
                    resetAccumulator();
                }
                break;

            case STATE_WAITING_FOR_LOCK:
                mSinAccumulator += sample * sinf(mInputPhase);
                mCosAccumulator += sample * cosf(mInputPhase);
                mFramesAccumulated++;
                // Must be a multiple of the period or the calculation will not be accurate.
                if (mFramesAccumulated == mSinePeriod * PERIODS_NEEDED_FOR_LOCK) {
                    setMagnitude(calculateMagnitudePhase(&mPhaseOffset));
                    // ALOGD("%s() mag = %f, offset = %f, prev = %f",
                    //         __func__, mMagnitude, mPhaseOffset, mPreviousPhaseOffset);
                    // NOTE(review): abs() on a double — fabs() would avoid any
                    // risk of the integer overload being selected; confirm.
                    if (mMagnitude > mThreshold) {
                        if (abs(mPhaseOffset) < kMaxPhaseError) {
                            mState = STATE_LOCKED;
                            // ALOGD("%5d: switch to STATE_LOCKED", mFrameCounter);
                        }
                        // Adjust mInputPhase to match measured phase
                        mInputPhase += mPhaseOffset;
                    }
                    resetAccumulator();
                }
                incrementInputPhase();
                break;

            case STATE_LOCKED: {
                // Predict next sine value
                double predicted = sinf(mInputPhase) * mMagnitude;
                double diff = predicted - sample;
                double absDiff = fabs(diff);
                mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
                if (absDiff > mScaledTolerance) {
                    result = ERROR_GLITCHES;
                    onGlitchStart();
                    // LOGI("diff glitch detected, absDiff = %g", absDiff);
                } else {
                    mSumSquareSignal += predicted * predicted;
                    mSumSquareNoise += diff * diff;
                    // Track incoming signal and slowly adjust magnitude to account
                    // for drift in the DRC or AGC.
                    // Must be a multiple of the period or the calculation will not be accurate.
                    if (transformSample(sample, mInputPhase)) {
                        mMeanSquareNoise = mSumSquareNoise * mInverseSinePeriod;
                        mMeanSquareSignal = mSumSquareSignal * mInverseSinePeriod;
                        resetAccumulator();

                        if (abs(mPhaseOffset) > kMaxPhaseError) {
                            result = ERROR_GLITCHES;
                            onGlitchStart();
                            ALOGD("phase glitch detected, phaseOffset = %g", mPhaseOffset);
                        } else if (mMagnitude < mThreshold) {
                            result = ERROR_GLITCHES;
                            onGlitchStart();
                            ALOGD("magnitude glitch detected, mMagnitude = %g", mMagnitude);
                        }
                    }
                }
                incrementInputPhase();
            } break;

            case STATE_GLITCHING: {
                // Predict next sine value
                mGlitchLength++;
                double predicted = sinf(mInputPhase) * mMagnitude;
                double diff = predicted - sample;
                double absDiff = fabs(diff);
                mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
                if (absDiff < mScaledTolerance) { // close enough?
                    // If we get a full sine period of non-glitch samples in a row then consider the glitch over.
                    // We don't want to just consider a zero crossing the end of a glitch.
                    if (mNonGlitchCount++ > mSinePeriod) {
                        onGlitchEnd();
                    }
                } else {
                    mNonGlitchCount = 0;
                    if (mGlitchLength > (4 * mSinePeriod)) {
                        relock();
                    }
                }
                incrementInputPhase();
            } break;

            case NUM_STATES: // not a real state
                break;
        }

        mFrameCounter++;

        return result;
    }

    // advance and wrap phase into (-PI, PI]
    void incrementInputPhase() {
        mInputPhase += mPhaseIncrement;
        if (mInputPhase > M_PI) {
            mInputPhase -= (2.0 * M_PI);
        }
    }

    // The output tone is only generated once the detector has left IDLE.
    bool isOutputEnabled() override { return mState != STATE_IDLE; }

    // Record the start of a glitch and switch to the glitching state.
    void onGlitchStart() {
        mGlitchCount++;
        // ALOGD("%5d: STARTED a glitch # %d", mFrameCounter, mGlitchCount);
        mState = STATE_GLITCHING;
        mGlitchLength = 1;
        mNonGlitchCount = 0;
        mLastGlitchPosition = mInfiniteRecording.getTotalWritten();
    }

    void onGlitchEnd() {
        // ALOGD("%5d: ENDED a glitch # %d, length = %d", mFrameCounter, mGlitchCount, mGlitchLength);
        mState = STATE_LOCKED;
        resetAccumulator();
    }

    // reset the sine wave detector
    void resetAccumulator() override {
        BaseSineAnalyzer::resetAccumulator();
        mSumSquareSignal = 0.0;
        mSumSquareNoise = 0.0;
    }

    // Abandon the current lock and try to reacquire the signal phase.
    void relock() {
        // ALOGD("relock: %d because of a very long %d glitch", mFrameCounter, mGlitchLength);
        mState = STATE_WAITING_FOR_LOCK;
        resetAccumulator();
    }

    void reset() override {
        BaseSineAnalyzer::reset();
        mState = STATE_IDLE;
        mDownCounter = IDLE_FRAME_COUNT;
    }

    void prepareToTest() override {
        BaseSineAnalyzer::prepareToTest();
        mGlitchCount = 0;
        mMaxGlitchDelta = 0.0;
        for (int i = 0; i < NUM_STATES; i++) {
            mStateFrameCounters[i] = 0;
        }
    }

    // Copy samples around the most recent glitch (starting 32 samples
    // before it) into buffer; returns the number of samples copied.
    int32_t getLastGlitch(float *buffer, int32_t length) {
        return mInfiniteRecording.readFrom(buffer, mLastGlitchPosition - 32, length);
    }

private:

    // These must match the values in GlitchActivity.java
    enum sine_state_t {
        STATE_IDLE,               // beginning
        STATE_IMMUNE,             // ignoring input, waiting for HW to settle
        STATE_WAITING_FOR_SIGNAL, // looking for a loud signal
        STATE_WAITING_FOR_LOCK,   // trying to lock onto the phase of the sine
        STATE_LOCKED,             // locked on the sine wave, looking for glitches
        STATE_GLITCHING,          // locked on the sine wave but glitching
        NUM_STATES
    };

    enum constants {
        // Arbitrary durations, assuming 48000 Hz
        IDLE_FRAME_COUNT = 48 * 100,
        IMMUNE_FRAME_COUNT = 48 * 100,
        PERIODS_NEEDED_FOR_LOCK = 8,
        MIN_SNR_DB = 65
    };

    static constexpr double kMaxPhaseError = M_PI * 0.05;

    double  mThreshold = 0.005;

    int32_t mStateFrameCounters[NUM_STATES]; // zeroed in prepareToTest()
    sine_state_t mState = STATE_IDLE;
    int64_t mLastGlitchPosition; // absolute position of last glitch in mInfiniteRecording

    double  mInputPhase = 0.0;
    double  mMaxGlitchDelta = 0.0;
    int32_t mGlitchCount = 0;
    int32_t mNonGlitchCount = 0;
    int32_t mGlitchLength = 0;
    int     mDownCounter = IDLE_FRAME_COUNT;
    int32_t mFrameCounter = 0;

    int32_t mForceGlitchDuration = 0; // if > 0 then force a glitch for debugging
    int32_t mForceGlitchCounter = 4 * 48000; // count down and trigger at zero

    // measure background noise continuously as a deviation from the expected signal
    double  mSumSquareSignal = 0.0;
    double  mSumSquareNoise = 0.0;
    double  mMeanSquareSignal = 0.0;
    double  mMeanSquareNoise = 0.0;

    PeakDetector mPeakFollower;
};
#endif //ANALYZER_GLITCH_ANALYZER_H

View File

@ -0,0 +1,67 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_INFINITE_RECORDING_H
#define OBOETESTER_INFINITE_RECORDING_H
#include <memory>
#include <unistd.h>
/**
* Record forever. Keep last data.
*/
/**
 * Endless circular recorder: write() forever while the buffer keeps only
 * the most recent maxSamples values; readFrom() retrieves a span by its
 * absolute sample position.
 */
template <typename T>
class InfiniteRecording {
public:
    InfiniteRecording(size_t maxSamples)
            : mMaxSamples(maxSamples) {
        mData = std::make_unique<T[]>(mMaxSamples);
    }

    /**
     * Copy up to `count` samples starting at absolute position `position`
     * into `buffer`.
     * @return number of samples actually copied
     */
    int32_t readFrom(T *buffer, size_t position, size_t count) {
        const size_t totalWritten = mWritten.load();
        // Clamp the request to what has been written and to the capacity.
        const size_t start = std::min(position, totalWritten);
        size_t available = std::min(count, mMaxSamples);
        available = std::min(available, totalWritten - start);
        if (available == 0) {
            return 0;
        }
        // The span may wrap past the end of the circular buffer,
        // in which case it is copied in two pieces.
        const size_t begin = start % mMaxSamples;
        const size_t firstPart = std::min(available, mMaxSamples - begin); // till end
        std::copy(&mData[begin], &mData[begin + firstPart], buffer);
        const size_t secondPart = available - firstPart;
        if (secondPart > 0) {
            // Wrapped: continue from the start of the storage.
            std::copy(&mData[0], &mData[secondPart], &buffer[firstPart]);
        }
        return available;
    }

    // Append one sample, overwriting the oldest when full.
    void write(T sample) {
        const size_t slot = mWritten.load() % mMaxSamples;
        mData[slot] = sample;
        mWritten++;
    }

    // Total number of samples ever written (monotonically increasing).
    int64_t getTotalWritten() {
        return mWritten.load();
    }

private:
    std::unique_ptr<T[]> mData;       // circular storage
    std::atomic<size_t>  mWritten{0}; // absolute write position
    const size_t         mMaxSamples; // capacity of the circular storage
};
#endif //OBOETESTER_INFINITE_RECORDING_H

View File

@ -0,0 +1,622 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tools for measuring latency and for detecting glitches.
* These classes are pure math and can be used with any audio system.
*/
#ifndef ANALYZER_LATENCY_ANALYZER_H
#define ANALYZER_LATENCY_ANALYZER_H
#include <algorithm>
#include <assert.h>
#include <cctype>
#include <iomanip>
#include <iostream>
#include <math.h>
#include <memory>
#include <sstream>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <vector>
#include "PeakDetector.h"
#include "PseudoRandom.h"
#include "RandomPulseGenerator.h"
// This is used when the code is in Oboe.
#ifndef ALOGD
#define ALOGD LOGD
#define ALOGE LOGE
#define ALOGW LOGW
#endif
#define LOOPBACK_RESULT_TAG "RESULT: "
static constexpr int32_t kDefaultSampleRate = 48000;
static constexpr int32_t kMillisPerSecond = 1000; // by definition
static constexpr int32_t kMaxLatencyMillis = 1000; // arbitrary and generous
static constexpr double kMinimumConfidence = 0.2;
/**
 * Result of one latency measurement.
 */
struct LatencyReport {
    int32_t latencyInFrames = 0; // round-trip latency in frames (was initialized with 0.0)
    double confidence = 0.0;     // peak correlation; low values mean the latency is unreliable

    // Restore the report to its initial, empty state.
    void reset() {
        latencyInFrames = 0;
        confidence = 0.0;
    }
};
// Compute a normalized cross-correlation between two equal-length windows.
// Returns approximately 1.0 for identical signals, -1.0 for inverted ones,
// and 0.0 for silence or uncorrelated data.
static double calculateNormalizedCorrelation(const float *a,
                                             const float *b,
                                             int windowSize) {
    double productSum = 0.0;
    double squareSum = 0.0;
    for (int n = 0; n < windowSize; n++) {
        const float s1 = a[n];
        const float s2 = b[n];
        productSum += s1 * s2;
        squareSum += ((s1 * s1) + (s2 * s2));
    }
    // Guard against division by (near) zero for silent windows.
    if (squareSum < 1.0e-9) {
        return 0.0;
    }
    return 2.0 * productSum / squareSum;
}
// Root-mean-square of numSamples values.
static double calculateRootMeanSquare(float *data, int32_t numSamples) {
    double sumOfSquares = 0.0;
    for (int32_t n = 0; n < numSamples; n++) {
        const float value = data[n];
        sumOfSquares += value * value;
    }
    return sqrt(sumOfSquares / numSamples);
}
/**
 * Monophonic recording with processing.
 * Stores the first channel of incoming frames as floats and offers
 * simple in-place transforms (square, normalize).
 */
class AudioRecording
{
public:
    // Allocate (or reallocate) storage for up to maxFrames samples.
    void allocate(int maxFrames) {
        mData = std::make_unique<float[]>(maxFrames);
        mMaxFrames = maxFrames;
    }

    // Write SHORT data from the first channel.
    // Returns the number of frames actually stored (clipped at capacity).
    int32_t write(const int16_t *inputData, int32_t inputChannelCount, int32_t numFrames) {
        // stop at end of buffer
        if ((mFrameCounter + numFrames) > mMaxFrames) {
            numFrames = mMaxFrames - mFrameCounter;
        }
        for (int i = 0; i < numFrames; i++) {
            // Scale 16-bit PCM into [-1.0, 1.0).
            mData[mFrameCounter++] = inputData[i * inputChannelCount] * (1.0f / 32768);
        }
        return numFrames;
    }

    // Write FLOAT data from the first channel.
    int32_t write(const float *inputData, int32_t inputChannelCount, int32_t numFrames) {
        // stop at end of buffer
        if ((mFrameCounter + numFrames) > mMaxFrames) {
            numFrames = mMaxFrames - mFrameCounter;
        }
        for (int i = 0; i < numFrames; i++) {
            mData[mFrameCounter++] = inputData[i * inputChannelCount];
        }
        return numFrames;
    }

    // Write a single FLOAT sample; returns 1 if stored, 0 when full.
    int32_t write(float sample) {
        // stop at end of buffer
        if (mFrameCounter < mMaxFrames) {
            mData[mFrameCounter++] = sample;
            return 1;
        }
        return 0;
    }

    // Discard the recorded samples (capacity is kept).
    void clear() {
        mFrameCounter = 0;
    }
    int32_t size() const {
        return mFrameCounter;
    }
    bool isFull() const {
        return mFrameCounter >= mMaxFrames;
    }
    float *getData() const {
        return mData.get();
    }
    void setSampleRate(int32_t sampleRate) {
        mSampleRate = sampleRate;
    }
    int32_t getSampleRate() const {
        return mSampleRate;
    }

    /**
     * Square the samples so they are all positive and so the peaks are emphasized.
     */
    void square() {
        float *x = mData.get();
        for (int i = 0; i < mFrameCounter; i++) {
            x[i] *= x[i];
        }
    }

    /**
     * Amplify a signal so that the peak matches the specified target.
     *
     * @param target final max value
     * @return gain applied to signal
     */
    float normalize(float target) {
        float maxValue = 1.0e-9f;
        for (int i = 0; i < mFrameCounter; i++) {
            // fabsf() keeps the magnitude a float; plain abs() may resolve
            // to the integer overload and truncate the sample.
            maxValue = std::max(maxValue, fabsf(mData[i]));
        }
        float gain = target / maxValue;
        for (int i = 0; i < mFrameCounter; i++) {
            mData[i] *= gain;
        }
        return gain;
    }

private:
    std::unique_ptr<float[]> mData;
    int32_t mFrameCounter = 0;
    int32_t mMaxFrames = 0;
    int32_t mSampleRate = kDefaultSampleRate; // common default
};
// Measure latency as the lag that maximizes the normalized cross-correlation
// between the recorded signal and the known pulse.
// Returns 0 on success; -1 if the recording is too short, -2 if no peak
// was found. The result is stored in *report.
static int measureLatencyFromPulse(AudioRecording &recorded,
                                   AudioRecording &pulse,
                                   LatencyReport *report) {
    report->latencyInFrames = 0;
    report->confidence = 0.0;

    int numCorrelations = recorded.size() - pulse.size();
    if (numCorrelations < 10) {
        ALOGE("%s() recording too small = %d frames\n", __func__, recorded.size());
        return -1;
    }
    std::unique_ptr<float[]> correlations= std::make_unique<float[]>(numCorrelations);

    // Correlate pulse against the recorded data.
    for (int i = 0; i < numCorrelations; i++) {
        float correlation = (float) calculateNormalizedCorrelation(&recorded.getData()[i],
                                                                   &pulse.getData()[0],
                                                                   pulse.size());
        correlations[i] = correlation;
    }

    // Find highest peak in correlation array.
    float peakCorrelation = 0.0;
    int peakIndex = -1;
    for (int i = 0; i < numCorrelations; i++) {
        // NOTE(review): abs() on a float — fabsf() would rule out the
        // integer overload being selected; confirm.
        float value = abs(correlations[i]);
        if (value > peakCorrelation) {
            peakCorrelation = value;
            peakIndex = i;
        }
    }
    if (peakIndex < 0) {
        ALOGE("%s() no signal for correlation\n", __func__);
        return -2;
    }
#if 0
    // Dump correlation data for charting.
    else {
        const int margin = 50;
        int startIndex = std::max(0, peakIndex - margin);
        int endIndex = std::min(numCorrelations - 1, peakIndex + margin);
        for (int index = startIndex; index < endIndex; index++) {
            ALOGD("Correlation, %d, %f", index, correlations[index]);
        }
    }
#endif

    // Latency is the offset of the peak; its height is the confidence.
    report->latencyInFrames = peakIndex;
    report->confidence = peakCorrelation;

    return 0;
}
// ====================================================================================
/**
 * Base class for loopback-test processors: they generate output frames
 * and analyze the frames that return through the input (microphone).
 */
class LoopbackProcessor {
public:
    virtual ~LoopbackProcessor() = default;

    // Result codes; 0 is success and errors are negative so they can
    // share the generic int result channel.
    enum result_code {
        RESULT_OK = 0,
        ERROR_NOISY = -99,
        ERROR_VOLUME_TOO_LOW,
        ERROR_VOLUME_TOO_HIGH,
        ERROR_CONFIDENCE,
        ERROR_INVALID_STATE,
        ERROR_GLITCHES,
        ERROR_NO_LOCK
    };

    virtual void prepareToTest() {
        reset();
    }

    virtual void reset() {
        mResult = 0;
        mResetCount++;
    }

    virtual result_code processInputFrame(const float *frameData, int channelCount) = 0;
    virtual result_code processOutputFrame(float *frameData, int channelCount) = 0;

    // Process paired input/output buffers one frame at a time, then drain
    // whichever side has frames left over.
    void process(const float *inputData, int inputChannelCount, int numInputFrames,
                 float *outputData, int outputChannelCount, int numOutputFrames) {
        int numBoth = std::min(numInputFrames, numOutputFrames);
        // Process one frame at a time.
        for (int i = 0; i < numBoth; i++) {
            processInputFrame(inputData, inputChannelCount);
            inputData += inputChannelCount;
            processOutputFrame(outputData, outputChannelCount);
            outputData += outputChannelCount;
        }
        // If there is more input than output.
        for (int i = numBoth; i < numInputFrames; i++) {
            processInputFrame(inputData, inputChannelCount);
            inputData += inputChannelCount;
        }
        // If there is more output than input.
        for (int i = numBoth; i < numOutputFrames; i++) {
            processOutputFrame(outputData, outputChannelCount);
            outputData += outputChannelCount;
        }
    }

    // Produce a human readable, multi-line report of the measurement.
    virtual std::string analyze() = 0;

    virtual void printStatus() {};

    int32_t getResult() {
        return mResult;
    }

    void setResult(int32_t result) {
        mResult = result;
    }

    virtual bool isDone() {
        return false;
    }

    // Optional persistence hooks; defaults report failure (-1).
    virtual int save(const char *fileName) {
        (void) fileName;
        return -1;
    }

    virtual int load(const char *fileName) {
        (void) fileName;
        return -1;
    }

    virtual void setSampleRate(int32_t sampleRate) {
        mSampleRate = sampleRate;
    }

    int32_t getSampleRate() const {
        return mSampleRate;
    }

    int32_t getResetCount() const {
        return mResetCount;
    }

    /** Called when not enough input frames could be read after synchronization.
     */
    virtual void onInsufficientRead() {
        reset();
    }

protected:
    int32_t mResetCount = 0; // how many times reset() has been called

private:
    int32_t mSampleRate = kDefaultSampleRate;
    int32_t mResult = 0;
};
/**
 * Abstract interface for processors that measure round-trip latency.
 */
class LatencyAnalyzer : public LoopbackProcessor {
public:

    LatencyAnalyzer() : LoopbackProcessor() {}
    virtual ~LatencyAnalyzer() = default;

    // Progress indicator, e.g. number of frames recorded so far.
    virtual int32_t getProgress() const = 0;

    // Current state of the measurement state machine.
    virtual int getState() = 0;

    // @return latency in frames
    virtual int32_t getMeasuredLatency() = 0;

    // @return peak correlation of the measurement; higher means more reliable
    virtual double getMeasuredConfidence() = 0;

    virtual double getBackgroundRMS() = 0;

    virtual double getSignalRMS() = 0;
};
// ====================================================================================
/**
* Measure latency given a loopback stream data.
* Use an encoded bit train as the sound source because it
* has an unambiguous correlation value.
* Uses a state machine to cycle through various stages.
*
*/
class PulseLatencyAnalyzer : public LatencyAnalyzer {
public:
    /**
     * Size the recording buffer to hold one full encoded pulse plus the
     * maximum latency window, then generate the random pulse to transmit.
     */
    PulseLatencyAnalyzer() : LatencyAnalyzer() {
        int32_t maxLatencyFrames = getSampleRate() * kMaxLatencyMillis / kMillisPerSecond;
        int32_t numPulseBits = getSampleRate() * kPulseLengthMillis
                / (kFramesPerEncodedBit * kMillisPerSecond);
        int32_t pulseLength = numPulseBits * kFramesPerEncodedBit;
        mFramesToRecord = pulseLength + maxLatencyFrames;
        mAudioRecording.allocate(mFramesToRecord);
        mAudioRecording.setSampleRate(getSampleRate());
        generateRandomPulse(pulseLength);
    }

    /**
     * Fill mPulse with pulseLength samples of a randomly encoded bit train.
     * @param pulseLength number of samples to generate
     */
    void generateRandomPulse(int32_t pulseLength) {
        mPulse.allocate(pulseLength);
        RandomPulseGenerator pulser(kFramesPerEncodedBit);
        for (int i = 0; i < pulseLength; i++) {
            mPulse.write(pulser.nextFloat());
        }
    }

    /** @return current echo_state value of the state machine */
    int getState() override {
        return mState;
    }

    /** Propagate the sample rate to the base class and the recording buffer. */
    void setSampleRate(int32_t sampleRate) override {
        LoopbackProcessor::setSampleRate(sampleRate);
        mAudioRecording.setSampleRate(sampleRate);
    }

    /** Restart the measurement from the background-noise phase. */
    void reset() override {
        LoopbackProcessor::reset();
        mState = STATE_MEASURE_BACKGROUND;
        // Number of frames to spend measuring background noise.
        mDownCounter = (int32_t) (getSampleRate() * kBackgroundMeasurementLengthSeconds);
        mLoopCounter = 0;
        mPulseCursor = 0;
        mBackgroundSumSquare = 0.0f;
        mBackgroundSumCount = 0;
        mBackgroundRMS = 0.0f;
        mSignalRMS = 0.0f;
        mAudioRecording.clear();
        mLatencyReport.reset();
    }

    /** @return true once the recording buffer holds all mFramesToRecord frames */
    bool hasEnoughData() {
        return mAudioRecording.isFull();
    }

    bool isDone() override {
        return mState == STATE_DONE;
    }

    /** @return number of frames recorded so far */
    int32_t getProgress() const override {
        return mAudioRecording.size();
    }

    /**
     * Correlate the recording against the transmitted pulse to measure latency,
     * and build a text report. Transitions the state machine to STATE_DONE.
     * @return human-readable report
     */
    std::string analyze() override {
        std::stringstream report;
        report << "PulseLatencyAnalyzer ---------------\n";
        report << LOOPBACK_RESULT_TAG "test.state = "
                << std::setw(8) << mState << "\n";
        report << LOOPBACK_RESULT_TAG "test.state.name = "
                << convertStateToText(mState) << "\n";
        report << LOOPBACK_RESULT_TAG "background.rms = "
                << std::setw(8) << mBackgroundRMS << "\n";
        int32_t newResult = RESULT_OK;
        if (mState != STATE_GOT_DATA) {
            report << "WARNING - Bad state. Check volume on device.\n";
            // setResult(ERROR_INVALID_STATE);
        } else {
            // Normalize the recording; keep the gain so the measured signal
            // RMS can be reported in the original scale.
            float gain = mAudioRecording.normalize(1.0f);
            measureLatencyFromPulse(mAudioRecording,
                                    mPulse,
                                    &mLatencyReport);
            if (mLatencyReport.confidence < kMinimumConfidence) {
                report << " ERROR - confidence too low!";
                newResult = ERROR_CONFIDENCE;
            } else {
                // RMS of the recorded pulse, starting at the measured latency offset.
                mSignalRMS = calculateRootMeanSquare(
                        &mAudioRecording.getData()[mLatencyReport.latencyInFrames], mPulse.size())
                        / gain;
            }
            double latencyMillis = kMillisPerSecond * (double) mLatencyReport.latencyInFrames
                    / getSampleRate();
            report << LOOPBACK_RESULT_TAG "latency.frames = " << std::setw(8)
                    << mLatencyReport.latencyInFrames << "\n";
            report << LOOPBACK_RESULT_TAG "latency.msec = " << std::setw(8)
                    << latencyMillis << "\n";
            report << LOOPBACK_RESULT_TAG "latency.confidence = " << std::setw(8)
                    << mLatencyReport.confidence << "\n";
        }
        mState = STATE_DONE;
        // Do not overwrite an error code that was set earlier.
        if (getResult() == RESULT_OK) {
            setResult(newResult);
        }
        return report.str();
    }

    int32_t getMeasuredLatency() override {
        return mLatencyReport.latencyInFrames;
    }

    double getMeasuredConfidence() override {
        return mLatencyReport.confidence;
    }

    double getBackgroundRMS() override {
        return mBackgroundRMS;
    }

    double getSignalRMS() override {
        return mSignalRMS;
    }

    /** @return true once the full recording has been captured */
    bool isRecordingComplete() {
        return mState == STATE_GOT_DATA;
    }

    void printStatus() override {
        ALOGD("latency: st = %d = %s", mState, convertStateToText(mState));
    }

    /**
     * Consume one input frame according to the current state:
     * accumulate background noise, then record until the buffer is full.
     */
    result_code processInputFrame(const float *frameData, int channelCount) override {
        echo_state nextState = mState;
        mLoopCounter++;
        switch (mState) {
            case STATE_MEASURE_BACKGROUND:
                // Measure background RMS on channel 0
                mBackgroundSumSquare += frameData[0] * frameData[0];
                mBackgroundSumCount++;
                mDownCounter--;
                if (mDownCounter <= 0) {
                    mBackgroundRMS = sqrtf(mBackgroundSumSquare / mBackgroundSumCount);
                    nextState = STATE_IN_PULSE;
                    mPulseCursor = 0;
                }
                break;
            case STATE_IN_PULSE:
                // Record input until the mAudioRecording is full.
                mAudioRecording.write(frameData, channelCount, 1);
                if (hasEnoughData()) {
                    nextState = STATE_GOT_DATA;
                }
                break;
            case STATE_GOT_DATA:
            case STATE_DONE:
            default:
                break;
        }
        mState = nextState;
        return RESULT_OK;
    }

    /**
     * Produce one output frame: play the pulse (then silence) while in
     * STATE_IN_PULSE, silence in every other state.
     */
    result_code processOutputFrame(float *frameData, int channelCount) override {
        switch (mState) {
            case STATE_IN_PULSE:
                if (mPulseCursor < mPulse.size()) {
                    float pulseSample = mPulse.getData()[mPulseCursor++];
                    // Duplicate the mono pulse into every output channel.
                    for (int i = 0; i < channelCount; i++) {
                        frameData[i] = pulseSample;
                    }
                } else {
                    for (int i = 0; i < channelCount; i++) {
                        frameData[i] = 0;
                    }
                }
                break;
            case STATE_MEASURE_BACKGROUND:
            case STATE_GOT_DATA:
            case STATE_DONE:
            default:
                for (int i = 0; i < channelCount; i++) {
                    frameData[i] = 0.0f; // silence
                }
                break;
        }
        return RESULT_OK;
    }

private:
    // Measurement state machine, advanced by processInputFrame().
    enum echo_state {
        STATE_MEASURE_BACKGROUND,
        STATE_IN_PULSE,
        STATE_GOT_DATA, // must match RoundTripLatencyActivity.java
        STATE_DONE,
    };

    /** @return short human-readable name for a state value */
    const char *convertStateToText(echo_state state) {
        switch (state) {
            case STATE_MEASURE_BACKGROUND:
                return "INIT";
            case STATE_IN_PULSE:
                return "PULSE";
            case STATE_GOT_DATA:
                return "GOT_DATA";
            case STATE_DONE:
                return "DONE";
        }
        return "UNKNOWN";
    }

    int32_t         mDownCounter = 500;      // frames left in the background phase; set in reset()
    int32_t         mLoopCounter = 0;        // total input frames seen
    echo_state      mState = STATE_MEASURE_BACKGROUND;

    static constexpr int32_t kFramesPerEncodedBit = 8; // multiple of 2
    static constexpr int32_t kPulseLengthMillis = 500;
    static constexpr double  kBackgroundMeasurementLengthSeconds = 0.5;

    AudioRecording     mPulse;               // the encoded pulse that gets played
    int32_t            mPulseCursor = 0;     // next pulse sample to play

    double             mBackgroundSumSquare = 0.0;
    int32_t            mBackgroundSumCount = 0;
    double             mBackgroundRMS = 0.0;
    double             mSignalRMS = 0.0;
    int32_t            mFramesToRecord = 0;

    AudioRecording     mAudioRecording; // contains only the input after starting the pulse
    LatencyReport      mLatencyReport;
};
#endif // ANALYZER_LATENCY_ANALYZER_H

View File

@ -0,0 +1,98 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_MANCHESTER_ENCODER_H
#define ANALYZER_MANCHESTER_ENCODER_H
#include <cstdint>
/**
* Encode bytes using Manchester Coding scheme.
*
* Manchester Code is self clocking.
* There is a transition in the middle of every bit.
* Zero is high then low.
* One is low then high.
*
* This avoids having long DC sections that would droop when
* passed though analog circuits with AC coupling.
*
* IEEE 802.3 compatible.
*/
/**
 * Encode a byte stream using the Manchester Coding scheme (IEEE 802.3
 * compatible). The code is self clocking: every bit contains a transition
 * in its middle — a zero is high then low, a one is low then high — which
 * avoids long DC sections that would droop through AC-coupled analog paths.
 */
class ManchesterEncoder {
public:
    ManchesterEncoder(int samplesPerPulse)
            : mSamplesPerPulse(samplesPerPulse)
            , mSamplesPerPulseHalf(samplesPerPulse / 2)
            , mCursor(samplesPerPulse) {
    }

    virtual ~ManchesterEncoder() = default;

    /**
     * Supplies the next byte of the stream to encode.
     * @return next data byte
     */
    virtual uint8_t onNextByte() = 0;

    /**
     * Produce the next encoded sample, +1.0f or -1.0f.
     * @return sample value
     */
    virtual float nextFloat() {
        advanceSample();
        // A "one" starts low and ends high; a "zero" is the mirror image.
        float firstHalfLevel = mCurrentBit ? -1.0f : 1.0f;
        return (mCursor < mSamplesPerPulseHalf) ? firstHalfLevel : -firstHalfLevel;
    }

protected:
    /**
     * Hook invoked once per freshly decoded bit, before any of its
     * samples are produced; subclasses may prepare waveforms here.
     * @param current value of the new bit
     */
    virtual void onNextBit(bool /* current */) {};

    void advanceSample() {
        ++mCursor;
        if (mCursor < mSamplesPerPulse) {
            return; // still inside the current bit
        }
        // Start a new bit, fetching a fresh byte when the old one is spent.
        mCursor = 0;
        if (mBitsLeft == 0) {
            mCurrentByte = onNextByte();
            mBitsLeft = 8;
        }
        --mBitsLeft;
        mCurrentBit = ((mCurrentByte >> mBitsLeft) & 1) != 0; // MSB first
        onNextBit(mCurrentBit);
    }

    bool getCurrentBit() {
        return mCurrentBit;
    }

    const int mSamplesPerPulse;
    const int mSamplesPerPulseHalf;
    int       mCursor;
    int       mBitsLeft = 0;
    uint8_t   mCurrentByte = 0;
    bool      mCurrentBit = false;
};
#endif //ANALYZER_MANCHESTER_ENCODER_H

View File

@ -0,0 +1,68 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_PEAK_DETECTOR_H
#define ANALYZER_PEAK_DETECTOR_H
#include <math.h>
/**
* Measure a peak envelope by rising with the peaks,
* and decaying exponentially after each peak.
* The absolute value of the input signal is used.
*/
/**
 * Follow the peak envelope of a signal: the level jumps up to meet each
 * new peak of |input| and decays exponentially between peaks.
 */
class PeakDetector {
public:
    /** Drop the envelope back to zero. */
    void reset() {
        mLevel = 0.0;
    }

    /**
     * Advance the detector by one sample.
     * @param input next signal sample; its absolute value is used
     * @return the updated envelope level
     */
    double process(double input) {
        const double decayed = mLevel * mDecay;  // exponential decay
        const double magnitude = fabs(input);
        // The envelope never falls below the current input signal.
        mLevel = (magnitude > decayed) ? magnitude : decayed;
        return mLevel;
    }

    /** @return current envelope level */
    double getLevel() const {
        return mLevel;
    }

    /** @return current decay multiplier */
    double getDecay() const {
        return mDecay;
    }

    /**
     * Multiply the level by this amount on every iteration,
     * which produces an exponential decay curve.
     * A value just under 1.0 works best, for example, 0.99.
     * @param decay scale factor applied per sample
     */
    void setDecay(double decay) {
        mDecay = decay;
    }

private:
    static constexpr double kDefaultDecay = 0.99f;

    double mLevel = 0.0;
    double mDecay = kDefaultDecay;
};
#endif //ANALYZER_PEAK_DETECTOR_H

View File

@ -0,0 +1,57 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_PSEUDORANDOM_H
#define ANALYZER_PSEUDORANDOM_H
#include <cctype>
/**
 * Deterministic pseudo-random number generator based on a 64-bit
 * linear-congruential sequence (constants from MMIX by Donald Knuth),
 * with predictable real-time cost.
 */
class PseudoRandom {
public:
    PseudoRandom(int64_t seed = 99887766)
            : mSeed(seed)
    {}

    /**
     * Returns the next random double from -1.0 to 1.0
     *
     * @return value from -1.0 to 1.0
     */
    double nextRandomDouble() {
        return nextRandomInteger() * (0.5 / (((int32_t)1) << 30));
    }

    /** Calculate random 32 bit number using linear-congruential method
     * with known real-time performance.
     */
    int32_t nextRandomInteger() {
#if __has_builtin(__builtin_mul_overflow) && __has_builtin(__builtin_add_overflow)
        int64_t prod;
        // Use values for 64-bit sequence from MMIX by Donald Knuth.
        __builtin_mul_overflow(mSeed, (int64_t)6364136223846793005, &prod);
        __builtin_add_overflow(prod, (int64_t)1442695040888963407, &mSeed);
#else
        // Do the update in unsigned 64-bit arithmetic: unsigned wraparound is
        // well defined, whereas the previous signed multiply/add overflowed
        // with undefined behavior. The resulting bit pattern is identical to
        // the builtin path above.
        uint64_t next = (uint64_t) mSeed * (uint64_t) 6364136223846793005
                + (uint64_t) 1442695040888963407;
        mSeed = (int64_t) next;
#endif
        return (int32_t) (mSeed >> 32); // The higher bits have a longer sequence.
    }

private:
    int64_t mSeed;
};
#endif //ANALYZER_PSEUDORANDOM_H

View File

@ -0,0 +1,43 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_RANDOM_PULSE_GENERATOR_H
#define ANALYZER_RANDOM_PULSE_GENERATOR_H
#include <stdlib.h>
#include "RoundedManchesterEncoder.h"
/**
* Encode random ones and zeros using Manchester Code per IEEE 802.3.
*/
/**
 * Generate a stream of random bits encoded with Manchester Code per
 * IEEE 802.3, with rounded edges inherited from the base encoder.
 */
class RandomPulseGenerator : public RoundedManchesterEncoder {
public:
    RandomPulseGenerator(int samplesPerPulse)
            : RoundedManchesterEncoder(samplesPerPulse) {
    }

    virtual ~RandomPulseGenerator() = default;

    /**
     * Supply the next byte to encode: the low eight bits of rand().
     * @return pseudo-random byte
     */
    uint8_t onNextByte() override {
        return static_cast<uint8_t>(rand() & 0xFF);
    }
};
#endif //ANALYZER_RANDOM_PULSE_GENERATOR_H

View File

@ -0,0 +1,88 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
#define ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
#include <math.h>
#include <memory.h>
#include <stdlib.h>

#include <memory>   // std::unique_ptr, std::make_unique

#include "ManchesterEncoder.h"
/**
* Encode bytes using Manchester Code.
* Round the edges using a half cosine to reduce ringing caused by a hard edge.
*/
class RoundedManchesterEncoder : public ManchesterEncoder {
public:
    /**
     * Precompute the two possible waveform tables for a "zero" bit; a "one"
     * bit is produced later by negating the samples in nextFloat().
     */
    RoundedManchesterEncoder(int samplesPerPulse)
            : ManchesterEncoder(samplesPerPulse) {
        int rampSize = samplesPerPulse / 4;
        mZeroAfterZero = std::make_unique<float[]>(samplesPerPulse);
        mZeroAfterOne = std::make_unique<float[]>(samplesPerPulse);

        int sampleIndex = 0;
        // First quarter: mZeroAfterZero ramps up with a half cosine (the
        // previous identical bit ended at the opposite level), while
        // mZeroAfterOne starts flat because the signal is already high.
        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
            float phase = (rampIndex + 1) * M_PI / rampSize;
            float sample = -cosf(phase);
            mZeroAfterZero[sampleIndex] = sample;
            mZeroAfterOne[sampleIndex] = 1.0f;
            sampleIndex++;
        }
        // Second quarter: both waveforms hold the high level.
        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
            mZeroAfterZero[sampleIndex] = 1.0f;
            mZeroAfterOne[sampleIndex] = 1.0f;
            sampleIndex++;
        }
        // Third quarter: the mid-bit transition, rounded with a half cosine.
        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
            float phase = (rampIndex + 1) * M_PI / rampSize;
            float sample = cosf(phase);
            mZeroAfterZero[sampleIndex] = sample;
            mZeroAfterOne[sampleIndex] = sample;
            sampleIndex++;
        }
        // Fourth quarter: both waveforms hold the low level.
        for (int rampIndex = 0; rampIndex < rampSize; rampIndex++) {
            mZeroAfterZero[sampleIndex] = -1.0f;
            mZeroAfterOne[sampleIndex] = -1.0f;
            sampleIndex++;
        }
    }

    void onNextBit(bool current) override {
        // If the bit value changed, the signal already sits at the new bit's
        // starting level, so use the flat-start table; otherwise use the
        // table that ramps from the previous bit's final level.
        mCurrentSamples = (current ^ mPreviousBit)
                ? mZeroAfterOne.get()
                : mZeroAfterZero.get();
        mPreviousBit = current;
    }

    float nextFloat() override {
        advanceSample();
        float output = mCurrentSamples[mCursor];
        // A "one" is the mirror image of a "zero".
        if (getCurrentBit()) output = -output;
        return output;
    }

private:
    bool mPreviousBit = false;          // value of the previously encoded bit
    float *mCurrentSamples = nullptr;   // table in use; set by onNextBit() before the first sample
    std::unique_ptr<float[]> mZeroAfterZero;
    std::unique_ptr<float[]> mZeroAfterOne;
};
#endif //ANALYZER_ROUNDED_MANCHESTER_ENCODER_H

View File

@ -0,0 +1,41 @@
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#ifndef NATIVE_AUDIO_ANDROID_DEBUG_H_H
#define NATIVE_AUDIO_ANDROID_DEBUG_H_H
#include <android/log.h>
// Compile-time logging switch: change "#if 1" to "#if 0" to strip all
// logging macros from the build.
#if 1
// Logcat tag used for every message.
#define MODULE_NAME  "OboeAudio"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, MODULE_NAME, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, MODULE_NAME, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, MODULE_NAME, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, MODULE_NAME, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, MODULE_NAME, __VA_ARGS__)
#define LOGF(...) __android_log_print(ANDROID_LOG_FATAL, MODULE_NAME, __VA_ARGS__)
#else
// Logging disabled: the macros expand to nothing.
#define LOGV(...)
#define LOGD(...)
#define LOGI(...)
#define LOGW(...)
#define LOGE(...)
#define LOGF(...)
#endif
#endif //NATIVE_AUDIO_ANDROID_DEBUG_H_H

View File

@ -0,0 +1,35 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <math.h>
#include "ExponentialShape.h"
// Construct as a FlowGraphFilter with a single input port.
ExponentialShape::ExponentialShape()
        : FlowGraphFilter(1) {
}
int32_t ExponentialShape::onProcess(int32_t numFrames) {
float *inputs = input.getBuffer();
float *outputs = output.getBuffer();
for (int i = 0; i < numFrames; i++) {
float normalizedPhase = (inputs[i] * 0.5) + 0.5;
outputs[i] = mMinimum * powf(mRatio, normalizedPhase);
}
return numFrames;
}

Some files were not shown because too many files have changed in this diff Show More