- add oboe library snapshot

This commit is contained in:
Dmytro Bogovych 2021-08-02 16:00:18 +03:00
parent cb9c2b693e
commit ed39725641
1289 changed files with 133692 additions and 0 deletions

8
src/libs/oboe/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
*/.DS_Store
.DS_Store
.externalNativeBuild/
.cxx/
.idea
build
.logpile

9
src/libs/oboe/AUTHORS Normal file
View File

@ -0,0 +1,9 @@
# This is the official list of authors for copyright purposes.
# This file is distinct from the CONTRIBUTORS files.
# See the latter for an explanation.
# Names should be added to this file as:
# Name or Organization <email address>
# The email address is not required for organizations.
Google Inc.

View File

@ -0,0 +1,98 @@
cmake_minimum_required(VERSION 3.4.1)

# Declare the "oboe" project. project() also defines, among others:
#   PROJECT_SOURCE_DIR - usually the root directory where Oboe has been cloned
#   PROJECT_BINARY_DIR - usually the containing project's binary directory, e.g.
#     ${OBOE_HOME}/samples/RhythmGame/.externalNativeBuild/cmake/ndkExtractorDebug/x86/oboe-bin
project(oboe)

# Static library target; sources are listed explicitly (no GLOB) so that
# additions show up in diffs and incremental builds stay correct.
add_library(oboe
    # AAudio backend
    src/aaudio/AAudioLoader.cpp
    src/aaudio/AudioStreamAAudio.cpp
    # Backend-independent core
    src/common/AudioSourceCaller.cpp
    src/common/AudioStream.cpp
    src/common/AudioStreamBuilder.cpp
    src/common/DataConversionFlowGraph.cpp
    src/common/FilterAudioStream.cpp
    src/common/FixedBlockAdapter.cpp
    src/common/FixedBlockReader.cpp
    src/common/FixedBlockWriter.cpp
    src/common/LatencyTuner.cpp
    src/common/SourceFloatCaller.cpp
    src/common/SourceI16Caller.cpp
    src/common/SourceI24Caller.cpp
    src/common/SourceI32Caller.cpp
    src/common/Utilities.cpp
    src/common/QuirksManager.cpp
    # FIFO buffering
    src/fifo/FifoBuffer.cpp
    src/fifo/FifoController.cpp
    src/fifo/FifoControllerBase.cpp
    src/fifo/FifoControllerIndirect.cpp
    # Flowgraph processing nodes
    src/flowgraph/FlowGraphNode.cpp
    src/flowgraph/ChannelCountConverter.cpp
    src/flowgraph/ClipToRange.cpp
    src/flowgraph/ManyToMultiConverter.cpp
    src/flowgraph/MonoToMultiConverter.cpp
    src/flowgraph/MultiToMonoConverter.cpp
    src/flowgraph/RampLinear.cpp
    src/flowgraph/SampleRateConverter.cpp
    src/flowgraph/SinkFloat.cpp
    src/flowgraph/SinkI16.cpp
    src/flowgraph/SinkI24.cpp
    src/flowgraph/SinkI32.cpp
    src/flowgraph/SourceFloat.cpp
    src/flowgraph/SourceI16.cpp
    src/flowgraph/SourceI24.cpp
    src/flowgraph/SourceI32.cpp
    # Sample-rate conversion
    src/flowgraph/resampler/IntegerRatio.cpp
    src/flowgraph/resampler/LinearResampler.cpp
    src/flowgraph/resampler/MultiChannelResampler.cpp
    src/flowgraph/resampler/PolyphaseResampler.cpp
    src/flowgraph/resampler/PolyphaseResamplerMono.cpp
    src/flowgraph/resampler/PolyphaseResamplerStereo.cpp
    src/flowgraph/resampler/SincResampler.cpp
    src/flowgraph/resampler/SincResamplerStereo.cpp
    # OpenSL ES backend
    src/opensles/AudioInputStreamOpenSLES.cpp
    src/opensles/AudioOutputStreamOpenSLES.cpp
    src/opensles/AudioStreamBuffered.cpp
    src/opensles/AudioStreamOpenSLES.cpp
    src/opensles/EngineOpenSLES.cpp
    src/opensles/OpenSLESUtilities.cpp
    src/opensles/OutputMixerOpenSLES.cpp
    # Misc utilities
    src/common/StabilizedCallback.cpp
    src/common/Trace.cpp
    src/common/Version.cpp
)

# Public headers live under include/ (propagated to consumers);
# implementation-only headers under src/ stay private.
target_include_directories(oboe
    PRIVATE src
    PUBLIC include
)

# Compile flags:
#   - C++17 via a raw flag because cmake_minimum_required is 3.4.1
#     (CXX_STANDARD 17 needs CMake >= 3.8); Android builds are clang-only
#   - -Ofast for the hot DSP paths
#   - -Werror only in debug configs so downstream release builds are not
#     broken by new compiler warnings
target_compile_options(oboe
    PRIVATE
        -std=c++17
        -Wall
        -Wextra-semi
        -Wshadow
        -Wshadow-field
        -Ofast
        "$<$<CONFIG:DEBUG>:-Werror>"
)

# Enable logging of D,V severities in debug builds only.
# PUBLIC so consumers compiling against Oboe headers see the same setting.
target_compile_definitions(oboe PUBLIC $<$<CONFIG:DEBUG>:OBOE_ENABLE_LOGGING=1>)

# Android system libraries: liblog for logging, OpenSLES for the legacy backend.
# (AAudio is loaded dynamically at runtime, so it is not linked here.)
target_link_libraries(oboe PRIVATE log OpenSLES)

# When installing oboe put the libraries in the lib/<ABI> folder, e.g.
# lib/arm64-v8a, and install the public headers alongside.
install(TARGETS oboe
    LIBRARY DESTINATION lib/${ANDROID_ABI}
    ARCHIVE DESTINATION lib/${ANDROID_ABI}
)
install(DIRECTORY include/oboe DESTINATION include)

View File

@ -0,0 +1 @@
Please see the CONTRIBUTING.md file for more information.

View File

@ -0,0 +1,25 @@
Want to contribute? Great! First, read this page (including the small print at the end).
### Before you contribute
Before we can use your code, you must sign the
[Google Individual Contributor License
Agreement](https://developers.google.com/open-source/cla/individual?csw=1)
(CLA), which you can do online. The CLA is necessary mainly because you own the
copyright to your changes, even after your contribution becomes part of our
codebase, so we need your permission to use and distribute your code. We also
need to be sure of various other things—for instance that you'll tell us if you
know that your code infringes on other people's patents. You don't have to sign
the CLA until after you've submitted your code for review and a member has
approved it, but you must do it before we can put your code into our codebase.
Before you start working on a larger contribution, you should get in touch with
us first through the issue tracker with your idea so that we can help out and
possibly guide you. Coordinating up front makes it much easier to avoid
frustration later on.
### Code reviews
All submissions, including submissions by project members, require review. We
use Github pull requests for this purpose.
### The small print
Contributions made by corporations are covered by a different agreement than
the one above, the Software Grant and Corporate Contributor License Agreement.

View File

@ -0,0 +1,14 @@
# People who have agreed to one of the CLAs and can contribute patches.
# The AUTHORS file lists the copyright holders; this file
# lists people. For example, Google employees are listed here
# but not in AUTHORS, because Google holds the copyright.
#
# https://developers.google.com/open-source/cla/individual
# https://developers.google.com/open-source/cla/corporate
#
# Names should be added to this file as:
# Name <email address>
Phil Burk <philburk@google.com>
Don Turner <donturner@google.com>
Mikhail Naganov <mnaganov@google.com>

2482
src/libs/oboe/Doxyfile Normal file

File diff suppressed because it is too large Load Diff

202
src/libs/oboe/LICENSE Normal file
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

202
src/libs/oboe/NOTICE Normal file
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

1
src/libs/oboe/README Normal file
View File

@ -0,0 +1 @@
Please see the README.md file for more information.

54
src/libs/oboe/README.md Normal file
View File

@ -0,0 +1,54 @@
# Oboe [![Build CI](https://github.com/google/oboe/workflows/Build%20CI/badge.svg)](https://github.com/google/oboe/actions)
[![Introduction to Oboe video](docs/images/getting-started-video.jpg)](https://www.youtube.com/watch?v=csfHAbr5ilI&list=PLWz5rJ2EKKc_duWv9IPNvx9YBudNMmLSa)
Oboe is a C++ library which makes it easy to build high-performance audio apps on Android. It was created primarily to allow developers to target a simplified API that works across multiple API levels back to API level 16 (Jelly Bean).
## Features
- Compatible with API 16 onwards - runs on 99% of Android devices
- Chooses the audio API (OpenSL ES on API 16+ or AAudio on API 27+) which will give the best audio performance on the target Android device
- Automatic latency tuning
- Modern C++ allowing you to write clean, elegant code
- Workarounds for some known issues
- [Used by popular apps and frameworks](https://github.com/google/oboe/wiki/AppsUsingOboe)
## Documentation
- [Getting Started Guide](docs/GettingStarted.md)
- [Full Guide to Oboe](docs/FullGuide.md)
- [API reference](https://google.github.io/oboe/reference)
- [Tech Notes](docs/notes/)
- [History of Audio features/bugs by Android version](docs/AndroidAudioHistory.md)
- [Migration guide for apps using OpenSL ES](docs/OpenSLESMigration.md)
- [Frequently Asked Questions](docs/FAQ.md) (FAQ)
- [Our roadmap](https://github.com/google/oboe/milestones) - Vote on a feature/issue by adding a thumbs up to the first comment.
### Community
- Reddit: [r/androidaudiodev](https://www.reddit.com/r/androidaudiodev/)
- StackOverflow: [#oboe](https://stackoverflow.com/questions/tagged/oboe)
## Testing
- [**OboeTester** app for measuring latency, glitches, etc.](https://github.com/google/oboe/tree/master/apps/OboeTester/docs)
- [Oboe unit tests](https://github.com/google/oboe/tree/master/tests)
## Videos
- [Getting started with Oboe](https://www.youtube.com/playlist?list=PLWz5rJ2EKKc_duWv9IPNvx9YBudNMmLSa)
- [Low Latency Audio - Because Your Ears Are Worth It](https://www.youtube.com/watch?v=8vOf_fDtur4) (Android Dev Summit '18)
- [Winning on Android](https://www.youtube.com/watch?v=tWBojmBpS74) - How to optimize an Android audio app. (ADC '18)
## Sample code and apps
- Sample apps can be found in the [samples directory](samples).
- A complete "effects processor" app called FXLab can be found in the [apps/fxlab folder](apps/fxlab).
- Also check out the [Rhythm Game codelab](https://developer.android.com/codelabs/musicalgame-using-oboe?hl=en#0).
### Third party sample code
- [Ableton Link integration demo](https://github.com/jbloit/AndroidLinkAudio) (author: jbloit)
## Contributing
We would love to receive your pull requests. Before we can though, please read the [contributing](CONTRIBUTING.md) guidelines.
## Version history
View the [releases page](../../releases).
## License
[LICENSE](LICENSE)

View File

@ -0,0 +1,13 @@
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build/
.idea/
/app/build/
/app/release/
/app/debug/
/app/app.iml
*.iml
/app/externalNativeBuild/

View File

@ -0,0 +1,7 @@
status: PUBLISHED
technologies: [Android, NDK]
categories: [NDK, C++]
languages: [C++, Java]
solutions: [Mobile]
github: googlesamples/android-ndk
license: apache2

View File

@ -0,0 +1,6 @@
# Oboe Tester
OboeTester is an app that can be used to test many of the features of Oboe, AAudio and OpenSL ES.
It can also be used to measure device latency and glitches.
# [OboeTester Documentation](docs)

View File

@ -0,0 +1,34 @@
cmake_minimum_required(VERSION 3.4.1)

# NOTE(review): these directory-scoped flag variables are set BEFORE
# add_subdirectory(oboe) below, so they deliberately apply to the Oboe
# subproject as well as to oboetester. Do not convert them to
# target_compile_options without accounting for that.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror -Wall -std=c++14 -DOBOE_NO_INCLUDE_AAUDIO -fvisibility=hidden")
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O2")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3")

# Parent folder added to the linker search path.
# NOTE(review): looks unused — "oboe" below resolves to the CMake target
# from add_subdirectory, not a prebuilt in this directory; verify before removing.
link_directories(${CMAKE_CURRENT_LIST_DIR}/..)

# GLOB is fragile: newly added files are not picked up until CMake re-runs
# (that is what the "bump" marker at the bottom of this file is for).
file(GLOB_RECURSE app_native_sources src/main/cpp/*)

### Name must match loadLibrary() call in MainActivity.java
add_library(oboetester SHARED ${app_native_sources})

### INCLUDE OBOE LIBRARY ###
# Path to the Oboe library directory (three levels up from this module).
set(OBOE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../..)

# Add the Oboe library as a subproject. Since Oboe is an out-of-tree source
# library we must also specify a binary directory.
add_subdirectory(${OBOE_DIR} ./oboe-bin)

# Oboe public headers plus internal sources (the tester exercises internals).
# Target-scoped rather than directory-scoped include_directories: only the
# oboetester target ever consumed these paths, so behavior is unchanged.
target_include_directories(oboetester PRIVATE
    ${OBOE_DIR}/include
    ${OBOE_DIR}/src
)
### END OBOE INCLUDE SECTION ###

# Link Android's liblog, the oboe target built above, and libatomic.
# PRIVATE (explicit keyword instead of the legacy plain signature):
# nothing links against this shared JNI library itself.
target_link_libraries(oboetester PRIVATE log oboe atomic)

# bump 4 to resync CMake  (dummy edit marker used to force a re-configure
# so the GLOB above re-runs and picks up new source files)

View File

@ -0,0 +1,44 @@
// Gradle build script for the OboeTester Android application module.
apply plugin: 'com.android.application'
android {
compileSdkVersion 29
defaultConfig {
applicationId = "com.mobileer.oboetester"
minSdkVersion 23
targetSdkVersion 29
// Also update the versions in the AndroidManifest.xml file.
versionCode 52
versionName "2.1.2"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
// Must stay in sync with the C++ standard used in this module's CMakeLists.txt.
cppFlags "-std=c++14"
// Build native code for all four supported ABIs.
abiFilters "x86", "x86_64", "armeabi-v7a", "arm64-v8a"
}
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
debug {
// Allow attaching a native debugger to debug builds.
jniDebuggable true
}
}
externalNativeBuild {
cmake {
// Entry point of the native build for this module.
path "CMakeLists.txt"
}
}
}
dependencies {
implementation fileTree(include: ['*.jar'], dir: 'libs')
implementation 'com.android.support.constraint:constraint-layout:2.0.4'
testImplementation 'junit:junit:4.13-beta-3'
implementation 'com.android.support:appcompat-v7:28.0.0'
androidTestImplementation 'com.android.support.test:runner:1.0.2'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
}

Binary file not shown.

View File

@ -0,0 +1,6 @@
#Thu Apr 11 16:29:30 PDT 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.1-all.zip

View File

@ -0,0 +1,172 @@
#!/usr/bin/env sh

##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
# APP_HOME is the physical directory containing this script.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

# Print a warning message to stdout.
warn () {
    echo "$*"
}

# Print an error message and terminate with a non-zero status.
die () {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

# The wrapper jar bootstraps the real Gradle distribution.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`
    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    # Rebuild the positional parameters from the converted argN variables.
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Escape application args
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=$(save "$@")

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
  cd "$(dirname "$0")"
fi

exec "$JAVACMD" "$@"

View File

@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

@rem APP_HOME is the directory containing this script.
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

@rem No JAVA_HOME: probe for java.exe on the PATH.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
@rem Strip quotes from JAVA_HOME before building the java.exe path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega

View File

@ -0,0 +1,9 @@
## This file must *NOT* be checked into Version Control Systems,
# as it contains information specific to your local configuration.
#
# Location of the SDK. This is only used by Gradle.
# For customization when using a Version Control System, please read the
# header note.
#Thu Apr 11 16:29:25 PDT 2019
ndk.dir=/Users/philburk/Library/Android/sdk/ndk-bundle
sdk.dir=/Users/philburk/Library/Android/sdk

View File

@ -0,0 +1,17 @@
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in /Users/gfan/dev/android-sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}

View File

@ -0,0 +1,127 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.mobileer.oboetester"
android:versionCode="52"
android:versionName="2.1.2">
<!-- versionCode and versionName also have to be updated in build.gradle -->
<uses-feature
android:name="android.hardware.microphone"
android:required="false" />
<uses-feature
android:name="android.hardware.audio.output"
android:required="true" />
<uses-feature
android:name="android.hardware.touchscreen"
android:required="false" />
<uses-feature
android:name="android.software.midi"
android:required="false" />
<uses-feature
android:name="android.software.leanback"
android:required="false" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<uses-permission android:name="android.permission.INTERNET" />
<!-- This is needed for sharing test results. -->
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<application
android:allowBackup="false"
android:fullBackupContent="false"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity
android:name=".MainActivity"
android:label="@string/app_name"
android:launchMode="singleTask"
android:screenOrientation="portrait">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
<category android:name="android.intent.category.LEANBACK_LAUNCHER" />
</intent-filter>
</activity>
<activity
android:name=".TestOutputActivity"
android:label="@string/title_activity_test_output"
android:screenOrientation="portrait" />
<activity
android:name=".TestInputActivity"
android:label="@string/title_activity_test_input"
android:screenOrientation="portrait" />
<activity
android:name=".TapToToneActivity"
android:label="@string/title_activity_output_latency"
android:screenOrientation="portrait" />
<activity
android:name=".RecorderActivity"
android:label="@string/title_activity_recorder"
android:screenOrientation="portrait" />
<activity
android:name=".EchoActivity"
android:label="@string/title_activity_echo"
android:screenOrientation="portrait" />
<activity
android:name=".RoundTripLatencyActivity"
android:label="@string/title_activity_rt_latency"
android:screenOrientation="portrait" />
<activity
android:name=".ManualGlitchActivity"
android:label="@string/title_activity_glitches"
android:screenOrientation="portrait" />
<activity
android:name=".AutomatedGlitchActivity"
android:label="@string/title_activity_auto_glitches"
android:screenOrientation="portrait" />
<activity
android:name=".TestDisconnectActivity"
android:label="@string/title_test_disconnect"
android:screenOrientation="portrait" />
<activity
android:name=".DeviceReportActivity"
android:label="@string/title_report_devices"
android:screenOrientation="portrait" />
<activity
android:name=".TestDataPathsActivity"
android:label="@string/title_data_paths"
android:screenOrientation="portrait" />
<activity
android:name=".ExtraTestsActivity"
android:exported="true"
android:label="@string/title_extra_tests" />
<activity
android:name=".ExternalTapToToneActivity"
android:label="@string/title_external_tap"
android:exported="true" />
<service
android:name=".MidiTapTester"
android:permission="android.permission.BIND_MIDI_DEVICE_SERVICE">
<intent-filter>
<action android:name="android.media.midi.MidiDeviceService" />
</intent-filter>
<meta-data
android:name="android.media.midi.MidiDeviceService"
android:resource="@xml/service_device_info" />
</service>
<provider
android:name="android.support.v4.content.FileProvider"
android:authorities="${applicationId}.provider"
android:exported="false"
android:grantUriPermissions="true">
<meta-data
android:name="android.support.FILE_PROVIDER_PATHS"
android:resource="@xml/provider_paths" />
</provider>
</application>
</manifest>

View File

@ -0,0 +1,39 @@
/*
* Copyright 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cstring>
#include <sched.h>
#include "common/OboeDebug.h"
#include "oboe/Oboe.h"
#include "AudioStreamGateway.h"
using namespace oboe::flowgraph;
/**
 * Oboe data callback: fill the output buffer from the attached flowgraph sink.
 * Always asks the stream to keep running.
 */
oboe::DataCallbackResult AudioStreamGateway::onAudioReady(
        oboe::AudioStream *audioStream,
        void *audioData,
        int numFrames) {
    printScheduler();
    // Guard clause: with no sink attached, leave the buffer as-is.
    if (mAudioSink == nullptr) {
        return oboe::DataCallbackResult::Continue;
    }
    mAudioSink->read(audioData, numFrames);
    return oboe::DataCallbackResult::Continue;
}

View File

@ -0,0 +1,55 @@
/*
* Copyright 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_AUDIOGRAPHRUNNER_H
#define NATIVEOBOE_AUDIOGRAPHRUNNER_H
#include <unistd.h>
#include <sys/types.h>
#include "flowgraph/FlowGraphNode.h"
#include "oboe/Oboe.h"
#include "OboeTesterStreamCallback.h"
using namespace oboe::flowgraph;
/**
* Bridge between an audio flowgraph and an audio device.
* Pass in an AudioSink and then pass
* this object to the AudioStreamBuilder as a callback.
*/
class AudioStreamGateway : public OboeTesterStreamCallback {
public:
    virtual ~AudioStreamGateway() = default;

    // Attach the flowgraph sink that supplies audio for the output stream.
    // Shared ownership keeps the sink alive while the gateway uses it.
    void setAudioSink(std::shared_ptr<oboe::flowgraph::FlowGraphSink> sink) {
        mAudioSink = sink;
    }

    /**
     * Called by Oboe when the stream is ready to process audio.
     * Pulls numFrames from the attached sink (if any) into audioData.
     */
    oboe::DataCallbackResult onAudioReady(
            oboe::AudioStream *audioStream,
            void *audioData,
            int numFrames) override;

private:
    // May be null until setAudioSink() is called.
    std::shared_ptr<oboe::flowgraph::FlowGraphSink> mAudioSink;
};
#endif //NATIVEOBOE_AUDIOGRAPHRUNNER_H

View File

@ -0,0 +1,85 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "FormatConverterBox.h"
// Build a single-channel flowgraph that converts sample formats:
// source(inputFormat) -> sink(outputFormat).
// numSamples is the maximum number of samples ever converted; both scratch
// buffers are sized for the widest sample type (int32_t) so any format fits.
FormatConverterBox::FormatConverterBox(int32_t numSamples,
                                       oboe::AudioFormat inputFormat,
                                       oboe::AudioFormat outputFormat) {
    mInputFormat = inputFormat;
    mOutputFormat = outputFormat;
    // Worst-case allocation: 4 bytes per sample regardless of actual format.
    mInputBuffer = std::make_unique<uint8_t[]>(numSamples * sizeof(int32_t));
    mOutputBuffer = std::make_unique<uint8_t[]>(numSamples * sizeof(int32_t));

    mSource.reset();
    switch (mInputFormat) {
        case oboe::AudioFormat::I16:
            mSource = std::make_unique<oboe::flowgraph::SourceI16>(1);
            break;
        case oboe::AudioFormat::I24:
            mSource = std::make_unique<oboe::flowgraph::SourceI24>(1);
            break;
        case oboe::AudioFormat::I32:
            mSource = std::make_unique<oboe::flowgraph::SourceI32>(1);
            break;
        // Invalid/Unspecified fall back to float handling.
        case oboe::AudioFormat::Float:
        case oboe::AudioFormat::Invalid:
        case oboe::AudioFormat::Unspecified:
            mSource = std::make_unique<oboe::flowgraph::SourceFloat>(1);
            break;
    }

    mSink.reset();
    switch (mOutputFormat) {
        case oboe::AudioFormat::I16:
            mSink = std::make_unique<oboe::flowgraph::SinkI16>(1);
            break;
        case oboe::AudioFormat::I24:
            mSink = std::make_unique<oboe::flowgraph::SinkI24>(1);
            break;
        case oboe::AudioFormat::I32:
            mSink = std::make_unique<oboe::flowgraph::SinkI32>(1);
            break;
        // Invalid/Unspecified fall back to float handling.
        case oboe::AudioFormat::Float:
        case oboe::AudioFormat::Invalid:
        case oboe::AudioFormat::Unspecified:
            mSink = std::make_unique<oboe::flowgraph::SinkFloat>(1);
            break;
    }

    if (mSource && mSink) {
        // Wire source to sink and reset the sink's pull state.
        mSource->output.connect(&mSink->input);
        mSink->pullReset();
    }
}
// Convert numSamples from the internal input buffer into the internal
// output buffer.
int32_t FormatConverterBox::convertInternalBuffers(int32_t numSamples) {
    void *destination = getOutputBuffer();
    const void *source = getInputBuffer();
    return convert(destination, numSamples, source);
}
// Convert numSamples from an external buffer into the internal output buffer.
int32_t FormatConverterBox::convertToInternalOutput(int32_t numSamples, const void *inputBuffer) {
    void *destination = getOutputBuffer();
    return convert(destination, numSamples, inputBuffer);
}
// Convert numSamples from the internal input buffer into an external buffer.
int32_t FormatConverterBox::convertFromInternalInput(void *outputBuffer, int32_t numSamples) {
    const void *source = getInputBuffer();
    return convert(outputBuffer, numSamples, source);
}
// Core conversion: feed the source node, then pull converted samples
// through the sink into outputBuffer. Returns the number of samples read.
int32_t FormatConverterBox::convert(void *outputBuffer, int32_t numSamples, const void *inputBuffer) {
    mSource->setData(inputBuffer, numSamples);
    int32_t samplesConverted = mSink->read(outputBuffer, numSamples);
    return samplesConverted;
}

View File

@ -0,0 +1,101 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_FORMAT_CONVERTER_BOX_H
#define OBOETESTER_FORMAT_CONVERTER_BOX_H
#include <unistd.h>
#include <sys/types.h>
#include "oboe/Oboe.h"
#include "flowgraph/SinkFloat.h"
#include "flowgraph/SinkI16.h"
#include "flowgraph/SinkI24.h"
#include "flowgraph/SinkI32.h"
#include "flowgraph/SourceFloat.h"
#include "flowgraph/SourceI16.h"
#include "flowgraph/SourceI24.h"
#include "flowgraph/SourceI32.h"
/**
* Use flowgraph modules to convert between the various data formats.
*
* Note that this does not do channel conversions.
*/
class FormatConverterBox {
public:
    // numSamples: maximum samples ever converted (sizes internal buffers).
    FormatConverterBox(int32_t numSamples,
                       oboe::AudioFormat inputFormat,
                       oboe::AudioFormat outputFormat);

    /**
     * @return internal buffer used to store output (converted) data
     */
    void *getOutputBuffer() {
        return (void *) mOutputBuffer.get();
    };

    /**
     * @return internal buffer used to store input data
     */
    void *getInputBuffer() {
        return (void *) mInputBuffer.get();
    };

    /** Convert the data from inputFormat to outputFormat
     * using both internal buffers.
     */
    int32_t convertInternalBuffers(int32_t numSamples);

    /**
     * Convert data from external buffer into internal output buffer.
     * @param numSamples
     * @param inputBuffer
     * @return number of samples converted
     */
    int32_t convertToInternalOutput(int32_t numSamples, const void *inputBuffer);

    /**
     *
     * Convert data from internal input buffer into external output buffer.
     * @param outputBuffer
     * @param numSamples
     * @return number of samples converted
     */
    int32_t convertFromInternalInput(void *outputBuffer, int32_t numSamples);

    /**
     * Convert data formats between the specified external buffers.
     * @param outputBuffer
     * @param numSamples
     * @param inputBuffer
     * @return number of samples converted
     */
    int32_t convert(void *outputBuffer, int32_t numSamples, const void *inputBuffer);

private:
    oboe::AudioFormat mInputFormat{oboe::AudioFormat::Invalid};
    oboe::AudioFormat mOutputFormat{oboe::AudioFormat::Invalid};
    // Scratch buffers sized for the worst case (4 bytes/sample).
    std::unique_ptr<uint8_t[]> mInputBuffer;
    std::unique_ptr<uint8_t[]> mOutputBuffer;
    // Mono flowgraph: mSource (input format) feeds mSink (output format).
    std::unique_ptr<oboe::flowgraph::FlowGraphSourceBuffered> mSource;
    std::unique_ptr<oboe::flowgraph::FlowGraphSink> mSink;
};
#endif //OBOETESTER_FORMAT_CONVERTER_BOX_H

View File

@ -0,0 +1,66 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include "FullDuplexAnalyzer.h"
// Prime the loopback processor with the output sample rate, then start
// both streams via the base class.
oboe::Result FullDuplexAnalyzer::start() {
    LoopbackProcessor *processor = getLoopbackProcessor();
    processor->setSampleRate(getOutputStream()->getSampleRate());
    processor->prepareToTest();
    return FullDuplexStream::start();
}
// Run one callback's worth of audio through the loopback processor and,
// when a recording is attached, save channel 0 of output and input as an
// interleaved stereo pair (output in slot 0, input in slot 1).
oboe::DataCallbackResult FullDuplexAnalyzer::onBothStreamsReady(
        const float *inputData,
        int numInputFrames,
        float *outputData,
        int numOutputFrames) {
    // Strides skip over interleaved channels so only channel 0 is visited.
    int32_t inputStride = getInputStream()->getChannelCount();
    int32_t outputStride = getOutputStream()->getChannelCount();
    const float *inputFloat = inputData;
    float *outputFloat = outputData;
    // Result deliberately discarded; the processor accumulates its own state.
    (void) getLoopbackProcessor()->process(inputFloat, inputStride, numInputFrames,
                                           outputFloat, outputStride, numOutputFrames);

    // write the first channel of output and input to the stereo recorder
    if (mRecording != nullptr) {
        float buffer[2];
        int numBoth = std::min(numInputFrames, numOutputFrames);
        // While both sides have frames, record output/input pairs.
        for (int i = 0; i < numBoth; i++) {
            buffer[0] = *outputFloat;
            outputFloat += outputStride;
            buffer[1] = *inputFloat;
            inputFloat += inputStride;
            mRecording->write(buffer, 1);
        }
        // Handle mismatch in numFrames: pad the shorter side with silence.
        buffer[0] = 0.0f; // gap in output
        for (int i = numBoth; i < numInputFrames; i++) {
            buffer[1] = *inputFloat;
            inputFloat += inputStride;
            mRecording->write(buffer, 1);
        }
        buffer[1] = 0.0f; // gap in input
        for (int i = numBoth; i < numOutputFrames; i++) {
            buffer[0] = *outputFloat;
            outputFloat += outputStride;
            mRecording->write(buffer, 1);
        }
    }
    return oboe::DataCallbackResult::Continue;
};

View File

@ -0,0 +1,64 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_FULL_DUPLEX_ANALYZER_H
#define OBOETESTER_FULL_DUPLEX_ANALYZER_H
#include <unistd.h>
#include <sys/types.h>
#include "oboe/Oboe.h"
#include "FullDuplexStream.h"
#include "analyzer/LatencyAnalyzer.h"
#include "MultiChannelRecording.h"
class FullDuplexAnalyzer : public FullDuplexStream {
public:
    // The processor is borrowed, not owned; it must outlive this analyzer.
    FullDuplexAnalyzer(LoopbackProcessor *processor)
            : mLoopbackProcessor(processor) {
        // Keep one burst of input as a cushion between read and write pointers.
        setMNumInputBurstsCushion(1);
    }

    /**
     * Called when data is available on both streams.
     * Feeds both sides to the loopback processor and optionally records
     * channel 0 of output and input as stereo pairs.
     */
    oboe::DataCallbackResult onBothStreamsReady(
            const float *inputData,
            int numInputFrames,
            float *outputData,
            int numOutputFrames
    ) override;

    // Prepares the loopback processor, then starts both streams.
    oboe::Result start() override;

    LoopbackProcessor *getLoopbackProcessor() {
        return mLoopbackProcessor;
    }

    // Optional recording target; nullptr disables recording. Not owned.
    void setRecording(MultiChannelRecording *recording) {
        mRecording = recording;
    }

private:
    MultiChannelRecording *mRecording = nullptr;
    LoopbackProcessor * const mLoopbackProcessor;
};
#endif //OBOETESTER_FULL_DUPLEX_ANALYZER_H

View File

@ -0,0 +1,46 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include "FullDuplexEcho.h"
// Allocate a delay line large enough for the maximum supported delay time,
// then start the underlying full-duplex stream pair.
oboe::Result FullDuplexEcho::start() {
    const int32_t maxDelayFrames =
            static_cast<int32_t>(kMaxDelayTimeSeconds * getOutputStream()->getSampleRate());
    mDelayLine = std::make_unique<InterpolatingDelayLine>(maxDelayFrames);
    return FullDuplexStream::start();
}
// Echo callback: write a delayed mono copy of the input to the output.
// Only channel 0 of each frame is processed; other channels stay silent.
oboe::DataCallbackResult FullDuplexEcho::onBothStreamsReady(
        const float *inputData,
        int numInputFrames,
        float *outputData,
        int numOutputFrames) {
    // Echo at most as many frames as are available on both sides.
    int32_t framesToEcho = std::min(numInputFrames, numOutputFrames);
    // FIX: read through a const pointer instead of casting away constness;
    // the input is never written. The redundant cast on outputData is gone too.
    const float *inputFloat = inputData;
    float *outputFloat = outputData;
    // Zero the whole output buffer first so frames not overwritten below
    // (when numOutputFrames > numInputFrames) play silence.
    memset(outputFloat, 0, numOutputFrames * getOutputStream()->getBytesPerFrame());
    int32_t inputStride = getInputStream()->getChannelCount();
    int32_t outputStride = getOutputStream()->getChannelCount();
    // Delay expressed in (possibly fractional) frames for interpolation.
    float delayFrames = mDelayTimeSeconds * getOutputStream()->getSampleRate();
    while (framesToEcho-- > 0) {
        *outputFloat = mDelayLine->process(delayFrames, *inputFloat); // mono delay
        inputFloat += inputStride;
        outputFloat += outputStride;
    }
    return oboe::DataCallbackResult::Continue;
}

View File

@ -0,0 +1,57 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_FULL_DUPLEX_ECHO_H
#define OBOETESTER_FULL_DUPLEX_ECHO_H
#include <unistd.h>
#include <sys/types.h>
#include "oboe/Oboe.h"
#include "FullDuplexStream.h"
#include "InterpolatingDelayLine.h"
class FullDuplexEcho : public FullDuplexStream {
public:
    FullDuplexEcho() {
        // No input cushion: the echo path wants minimal input latency.
        setMNumInputBurstsCushion(0);
    }

    /**
     * Called when data is available on both streams.
     * Writes a delayed mono copy of the input to the output.
     */
    oboe::DataCallbackResult onBothStreamsReady(
            const float *inputData,
            int numInputFrames,
            float *outputData,
            int numOutputFrames
    ) override;

    // Allocates the delay line sized for kMaxDelayTimeSeconds, then starts
    // both streams.
    oboe::Result start() override;

    // Set the echo delay in seconds. The delay line is sized for
    // kMaxDelayTimeSeconds; behavior for larger values depends on
    // InterpolatingDelayLine — TODO confirm clipping.
    void setDelayTime(double delayTimeSeconds) {
        mDelayTimeSeconds = delayTimeSeconds;
    }

private:
    std::unique_ptr<InterpolatingDelayLine> mDelayLine;
    static constexpr double kMaxDelayTimeSeconds = 4.0;
    double mDelayTimeSeconds = kMaxDelayTimeSeconds;
};
#endif //OBOETESTER_FULL_DUPLEX_ECHO_H

View File

@ -0,0 +1,152 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include "FullDuplexStream.h"
// Non-blocking read of up to numFrames into the input converter's buffer.
// On success the samples are converted to float in the converter's output
// buffer. Returns the stream's read result (frames read on success).
oboe::ResultWithValue<int32_t> FullDuplexStream::readInput(int32_t numFrames) {
    oboe::ResultWithValue<int32_t> readResult = getInputStream()->read(
            mInputConverter->getInputBuffer(),
            numFrames,
            0 /* timeout: do not block in the callback */);
    if (readResult == oboe::Result::OK) {
        const int32_t samplesRead = readResult.value() * getInputStream()->getChannelCount();
        mInputConverter->convertInternalBuffers(samplesRead);
    }
    return readResult;
}
// Output-stream callback driving a small state machine with three warm-up
// phases before steady state:
//   1) drain the input stream, 2) let a cushion of input accumulate,
//   3) discard a few callbacks while reading, then 4) steady state:
//   read input, hand both buffers to onBothStreamsReady(), convert the
//   float result back to the output stream's format.
oboe::DataCallbackResult FullDuplexStream::onAudioReady(
        oboe::AudioStream *outputStream,
        void *audioData,
        int numFrames) {
    oboe::DataCallbackResult callbackResult = oboe::DataCallbackResult::Continue;
    int32_t actualFramesRead = 0;
    // Silence the output.
    int32_t numBytes = numFrames * outputStream->getBytesPerFrame();
    memset(audioData, 0 /* value */, numBytes);
    if (mCountCallbacksToDrain > 0) {
        // Drain the input.
        int32_t totalFramesRead = 0;
        do {
            oboe::ResultWithValue<int32_t> result = readInput(numFrames);
            if (!result) {
                // Ignore errors because input stream may not be started yet.
                break;
            }
            actualFramesRead = result.value();
            totalFramesRead += actualFramesRead;
        } while (actualFramesRead > 0);
        // Only counts if we actually got some data.
        if (totalFramesRead > 0) {
            mCountCallbacksToDrain--;
        }
    } else if (mCountInputBurstsCushion > 0) {
        // Let the input fill up a bit so we are not so close to the write pointer.
        mCountInputBurstsCushion--;
    } else if (mCountCallbacksToDiscard > 0) {
        mCountCallbacksToDiscard--;
        // Ignore. Allow the input to reach to equilibrium with the output.
        oboe::ResultWithValue<int32_t> resultAvailable = getInputStream()->getAvailableFrames();
        if (!resultAvailable) {
            LOGE("%s() getAvailableFrames() returned %s\n",
                 __func__, convertToText(resultAvailable.error()));
            callbackResult = oboe::DataCallbackResult::Stop;
        } else {
            int32_t framesAvailable = resultAvailable.value();
            if (framesAvailable >= mMinimumFramesBeforeRead) {
                // Data is read (and discarded) only when enough is buffered.
                oboe::ResultWithValue<int32_t> resultRead = readInput(numFrames);
                if (!resultRead) {
                    LOGE("%s() read() returned %s\n", __func__, convertToText(resultRead.error()));
                    callbackResult = oboe::DataCallbackResult::Stop;
                }
            }
        }
    } else {
        // Steady state.
        int32_t framesRead = 0;
        oboe::ResultWithValue<int32_t> resultAvailable = getInputStream()->getAvailableFrames();
        if (!resultAvailable) {
            LOGE("%s() getAvailableFrames() returned %s\n", __func__, convertToText(resultAvailable.error()));
            callbackResult = oboe::DataCallbackResult::Stop;
        } else {
            int32_t framesAvailable = resultAvailable.value();
            if (framesAvailable >= mMinimumFramesBeforeRead) {
                // Read data into input buffer.
                oboe::ResultWithValue<int32_t> resultRead = readInput(numFrames);
                if (!resultRead) {
                    LOGE("%s() read() returned %s\n", __func__, convertToText(resultRead.error()));
                    callbackResult = oboe::DataCallbackResult::Stop;
                } else {
                    framesRead = resultRead.value();
                }
            }
        }
        if (callbackResult == oboe::DataCallbackResult::Continue) {
            // Process float data, then convert it to the output stream format.
            callbackResult = onBothStreamsReady(
                    (const float *) mInputConverter->getOutputBuffer(),
                    framesRead,
                    (float *) mOutputConverter->getInputBuffer(), numFrames);
            mOutputConverter->convertFromInternalInput( audioData,
                                                        numFrames * getOutputStream()->getChannelCount());
        }
    }
    if (callbackResult == oboe::DataCallbackResult::Stop) {
        // Stopping the output (via the return value) also stops the input.
        getInputStream()->requestStop();
    }
    return callbackResult;
}
// Reset the warm-up state machine, build format converters sized for the
// largest possible callback, then start input before output so data is
// flowing when output callbacks begin.
oboe::Result FullDuplexStream::start() {
    mCountCallbacksToDrain = kNumCallbacksToDrain;
    mCountInputBurstsCushion = mNumInputBurstsCushion;
    mCountCallbacksToDiscard = kNumCallbacksToDiscard;

    // Determine maximum size that could possibly be called.
    const int32_t maxSamples = getOutputStream()->getBufferCapacityInFrames()
            * getOutputStream()->getChannelCount();
    mInputConverter = std::make_unique<FormatConverterBox>(
            maxSamples,
            getInputStream()->getFormat(),
            oboe::AudioFormat::Float);
    mOutputConverter = std::make_unique<FormatConverterBox>(
            maxSamples,
            oboe::AudioFormat::Float,
            getOutputStream()->getFormat());

    oboe::Result inputResult = getInputStream()->requestStart();
    if (inputResult != oboe::Result::OK) {
        return inputResult;
    }
    return getOutputStream()->requestStart();
}
// Stop both streams; only the input stream's result is reported.
oboe::Result FullDuplexStream::stop() {
    getOutputStream()->requestStop(); // TODO result?
    oboe::Result inputResult = getInputStream()->requestStop();
    return inputResult;
}
// Accessor for the configured input cushion (in bursts).
int32_t FullDuplexStream::getMNumInputBurstsCushion() const {
    return mNumInputBurstsCushion;
}
void FullDuplexStream::setMNumInputBurstsCushion(int32_t numBursts) {
FullDuplexStream::mNumInputBurstsCushion = numBursts;
}

View File

@ -0,0 +1,119 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_FULL_DUPLEX_STREAM_H
#define OBOETESTER_FULL_DUPLEX_STREAM_H
#include <unistd.h>
#include <sys/types.h>
#include "oboe/Oboe.h"
#include "FormatConverterBox.h"
// Pairs an input and an output stream for full-duplex processing.
// The output stream drives the callback; the input stream is read (polled)
// from inside onAudioReady(). Subclasses implement onBothStreamsReady().
class FullDuplexStream : public oboe::AudioStreamCallback {
public:
FullDuplexStream() {}
virtual ~FullDuplexStream() = default;
// Streams are not owned by this class; caller manages their lifetime.
void setInputStream(oboe::AudioStream *stream) {
mInputStream = stream;
}
oboe::AudioStream *getInputStream() {
return mInputStream;
}
void setOutputStream(oboe::AudioStream *stream) {
mOutputStream = stream;
}
oboe::AudioStream *getOutputStream() {
return mOutputStream;
}
// Starts input then output; also (re)allocates the format converters.
virtual oboe::Result start();
virtual oboe::Result stop();
// Read up to numFrames from the input stream into the input converter.
oboe::ResultWithValue<int32_t> readInput(int32_t numFrames);
/**
* Called when data is available on both streams.
* Caller should override this method.
*/
virtual oboe::DataCallbackResult onBothStreamsReady(
const float *inputData,
int numInputFrames,
float *outputData,
int numOutputFrames
) = 0;
/**
* Called by Oboe when the stream is ready to process audio.
*/
oboe::DataCallbackResult onAudioReady(
oboe::AudioStream *audioStream,
void *audioData,
int numFrames) override;
int32_t getMNumInputBurstsCushion() const;
/**
* Number of bursts to leave in the input buffer as a cushion.
* Typically 0 for latency measurements
* or 1 for glitch tests.
*
* @param mNumInputBurstsCushion number of bursts to keep buffered
*/
void setMNumInputBurstsCushion(int32_t mNumInputBurstsCushion);
// Minimum frames that must be available before the callback reads input.
void setMinimumFramesBeforeRead(int32_t numFrames) {
mMinimumFramesBeforeRead = numFrames;
}
int32_t getMinimumFramesBeforeRead() const {
return mMinimumFramesBeforeRead;
}
private:
// TODO add getters and setters
static constexpr int32_t kNumCallbacksToDrain = 20;
static constexpr int32_t kNumCallbacksToDiscard = 30;
// let input fill back up, usually 0 or 1
int32_t mNumInputBurstsCushion = 0;
int32_t mMinimumFramesBeforeRead = 0;
// We want to reach a state where the input buffer is empty and
// the output buffer is full.
// These are used in order.
// Drain several callback so that input is empty.
int32_t mCountCallbacksToDrain = kNumCallbacksToDrain;
// Let the input fill back up slightly so we don't run dry.
int32_t mCountInputBurstsCushion = mNumInputBurstsCushion;
// Discard some callbacks so the input and output reach equilibrium.
int32_t mCountCallbacksToDiscard = kNumCallbacksToDiscard;
// Raw pointers; ownership stays with the caller (see setters above).
oboe::AudioStream *mInputStream = nullptr;
oboe::AudioStream *mOutputStream = nullptr;
// Convert input/output between stream formats and float (set up in start()).
std::unique_ptr<FormatConverterBox> mInputConverter;
std::unique_ptr<FormatConverterBox> mOutputConverter;
};
#endif //OBOETESTER_FULL_DUPLEX_STREAM_H

View File

@ -0,0 +1,42 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include "InputStreamCallbackAnalyzer.h"
// Per-callback analysis of incoming audio: convert to float, optionally
// record it, and feed every sample to its channel's peak detector.
oboe::DataCallbackResult InputStreamCallbackAnalyzer::onAudioReady(
oboe::AudioStream *audioStream,
void *audioData,
int numFrames) {
int32_t channelCount = audioStream->getChannelCount();
printScheduler();
// Convert the raw callback buffer to float for analysis.
mInputConverter->convertToInternalOutput(numFrames * channelCount, audioData);
float *floatData = (float *) mInputConverter->getOutputBuffer();
if (mRecording != nullptr) {
mRecording->write(floatData, numFrames);
}
// Track the peak level of each channel, sample by sample.
int32_t sampleIndex = 0;
for (int iFrame = 0; iFrame < numFrames; iFrame++) {
for (int iChannel = 0; iChannel < channelCount; iChannel++) {
float sample = floatData[sampleIndex++];
mPeakDetectors[iChannel].process(sample);
}
}
// NOTE(review): blocking here until mMinimumFramesBeforeRead frames are
// available appears intentional (glitch testing) — confirm before changing.
audioStream->waitForAvailableFrames(mMinimumFramesBeforeRead, oboe::kNanosPerSecond);
return oboe::DataCallbackResult::Continue;
}

View File

@ -0,0 +1,87 @@
/*
* Copyright 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_INPUTSTREAMCALLBACKANALYZER_H
#define NATIVEOBOE_INPUTSTREAMCALLBACKANALYZER_H
#include <unistd.h>
#include <sys/types.h>
// TODO #include "flowgraph/FlowGraph.h"
#include "oboe/Oboe.h"
#include "analyzer/PeakDetector.h"
#include "FormatConverterBox.h"
#include "MultiChannelRecording.h"
#include "OboeTesterStreamCallback.h"
constexpr int kMaxInputChannels = 8;
// Analyzes input-stream callbacks: peak levels per channel plus optional
// recording of the float-converted data.
class InputStreamCallbackAnalyzer : public OboeTesterStreamCallback {
public:
    /**
     * Reset all per-channel peak detectors and the base-class state.
     */
    void reset() {
        // BUGFIX: iterate by reference. The original `for (auto detector : ...)`
        // copied each PeakDetector and reset the copy, leaving the real
        // detectors untouched.
        for (auto &detector : mPeakDetectors) {
            detector.reset();
        }
        OboeTesterStreamCallback::reset();
    }
    /**
     * Allocate the input format converter.
     * @param maxFramesPerCallback largest callback size expected
     * @param channelCount         channels per frame
     * @param inputFormat          native format of the input stream
     */
    void setup(int32_t maxFramesPerCallback,
               int32_t channelCount,
               oboe::AudioFormat inputFormat) {
        int32_t bufferSize = maxFramesPerCallback * channelCount;
        mInputConverter = std::make_unique<FormatConverterBox>(bufferSize,
                                                               inputFormat,
                                                               oboe::AudioFormat::Float);
    }
    /**
     * Called by Oboe when the stream is ready to process audio.
     */
    oboe::DataCallbackResult onAudioReady(
            oboe::AudioStream *audioStream,
            void *audioData,
            int numFrames) override;
    // Recording is optional and not owned by this class.
    void setRecording(MultiChannelRecording *recording) {
        mRecording = recording;
    }
    double getPeakLevel(int index) {
        return mPeakDetectors[index].getLevel();
    }
    void setMinimumFramesBeforeRead(int32_t numFrames) {
        mMinimumFramesBeforeRead = numFrames;
    }
    int32_t getMinimumFramesBeforeRead() {
        return mMinimumFramesBeforeRead;
    }
public:
    PeakDetector mPeakDetectors[kMaxInputChannels];
    MultiChannelRecording *mRecording = nullptr;
private:
    std::unique_ptr<FormatConverterBox> mInputConverter;
    int32_t mMinimumFramesBeforeRead = 0;
};
#endif //NATIVEOBOE_INPUTSTREAMCALLBACKANALYZER_H

View File

@ -0,0 +1,42 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include "InterpolatingDelayLine.h"
// Allocate a delay line holding delaySize samples (zero-initialized by
// make_unique).
InterpolatingDelayLine::InterpolatingDelayLine(int32_t delaySize)
        : mDelayLine(std::make_unique<float[]>(delaySize))
        , mDelaySize(delaySize) {
}
/**
 * Write one sample into the delay line and read back a delayed sample.
 *
 * @param delay number of samples to delay the output (clamped to the line size)
 * @param input sample to be written to the delay line
 * @return delayed sample
 */
float InterpolatingDelayLine::process(float delay, float input) {
    // Store the incoming sample at the write cursor.
    // (The original stored the same sample twice, once via a pointer and
    // once via an index; a single store is sufficient.)
    mDelayLine[mCursor] = input;
    // Truncate the delay and clamp it so the read stays inside the buffer.
    int32_t delayInt = std::min(mDelaySize - 1, (int32_t) delay);
    int32_t readIndex = mCursor - delayInt;
    if (readIndex < 0) {
        readIndex += mDelaySize; // wrap around the circular buffer
    }
    // TODO interpolate between adjacent samples using the fractional delay
    float output = mDelayLine[readIndex];
    mCursor++;
    if (mCursor >= mDelaySize) {
        mCursor = 0;
    }
    return output;
}

View File

@ -0,0 +1,48 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_INTERPOLATING_DELAY_LINE_H
#define OBOETESTER_INTERPOLATING_DELAY_LINE_H
#include <memory>
#include <unistd.h>
#include <sys/types.h>
#include "oboe/Oboe.h"
#include "FullDuplexStream.h"
/**
* Monophonic delay line.
*/
class InterpolatingDelayLine {
public:
// delaySize: capacity of the delay line in samples.
explicit InterpolatingDelayLine(int32_t delaySize);
/**
* @param delay number of samples to delay the output (clamped to the line size)
* @param input sample to be written to the delay line
* @return delayed value
*/
float process(float delay, float input);
private:
// Circular buffer of samples; mCursor is the next write position.
std::unique_ptr<float[]> mDelayLine;
int32_t mCursor = 0;
int32_t mDelaySize = 0;
};
#endif //OBOETESTER_INTERPOLATING_DELAY_LINE_H

View File

@ -0,0 +1,161 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_MULTICHANNEL_RECORDING_H
#define NATIVEOBOE_MULTICHANNEL_RECORDING_H
#include <algorithm>
#include <cstdint>
#include <memory>
#include <memory.h>
#include <unistd.h>
#include <sys/types.h>
/**
* Store multi-channel audio data in float format.
* The most recent data will be saved.
* Old data may be overwritten.
*
* Note that this is not thread safe. Do not read and write from separate threads.
*/
/**
 * Store multi-channel audio data in float format.
 * The most recent data will be saved.
 * Old data may be overwritten.
 *
 * Note that this is not thread safe. Do not read and write from separate threads.
 */
class MultiChannelRecording {
public:
    MultiChannelRecording(int32_t channelCount, int32_t maxFrames)
            : mData(std::make_unique<float[]>(channelCount * maxFrames))
            , mChannelCount(channelCount)
            , mMaxFrames(maxFrames) {
    }
    // unique_ptr owns the buffer (rule of zero). This also makes the class
    // non-copyable: with the previous raw pointer + delete[] destructor, a
    // copy would have double-freed the sample buffer.

    /** Move the read cursor back so a subsequent read covers all stored data. */
    void rewind() {
        mReadCursorFrames = mWriteCursorFrames - getSizeInFrames();
    }
    /** Discard all stored data. */
    void clear() {
        mReadCursorFrames = 0;
        mWriteCursorFrames = 0;
    }
    int32_t getChannelCount() {
        return mChannelCount;
    }
    /** @return number of valid frames currently stored, at most mMaxFrames */
    int32_t getSizeInFrames() {
        return (int32_t) std::min(mWriteCursorFrames, static_cast<int64_t>(mMaxFrames));
    }
    int32_t getReadIndex() {
        return mReadCursorFrames % mMaxFrames;
    }
    int32_t getWriteIndex() {
        return mWriteCursorFrames % mMaxFrames;
    }
    /**
     * Write numFrames from the short buffer into the recording.
     * Overwrite old data if necessary.
     * Convert shorts to floats.
     *
     * @param buffer    source samples, numFrames * channelCount shorts
     * @param numFrames frames to write
     * @return number of frames actually written.
     */
    int32_t write(int16_t *buffer, int32_t numFrames) {
        int32_t framesLeft = numFrames;
        while (framesLeft > 0) {
            int32_t indexFrame = getWriteIndex();
            // contiguous writes up to the end of the circular buffer
            int32_t framesToEndOfBuffer = mMaxFrames - indexFrame;
            int32_t framesNow = std::min(framesLeft, framesToEndOfBuffer);
            int32_t numSamples = framesNow * mChannelCount;
            int32_t sampleIndex = indexFrame * mChannelCount;
            for (int i = 0; i < numSamples; i++) {
                // Scale int16 to [-1.0, 1.0).
                mData[sampleIndex++] = *buffer++ * (1.0f / 32768);
            }
            mWriteCursorFrames += framesNow;
            framesLeft -= framesNow;
        }
        return numFrames - framesLeft;
    }
    /**
     * Write all numFrames from the float buffer into the recording.
     * Overwrite old data if full.
     * @param buffer    source samples, numFrames * channelCount floats
     * @param numFrames frames to write
     * @return number of frames actually written (always numFrames).
     */
    int32_t write(float *buffer, int32_t numFrames) {
        int32_t framesLeft = numFrames;
        while (framesLeft > 0) {
            int32_t indexFrame = getWriteIndex();
            // contiguous writes up to the end of the circular buffer
            int32_t framesToEnd = mMaxFrames - indexFrame;
            int32_t framesNow = std::min(framesLeft, framesToEnd);
            int32_t numSamples = framesNow * mChannelCount;
            int32_t sampleIndex = indexFrame * mChannelCount;
            memcpy(&mData[sampleIndex],
                   buffer,
                   (numSamples * sizeof(float)));
            buffer += numSamples;
            mWriteCursorFrames += framesNow;
            framesLeft -= framesNow;
        }
        return numFrames;
    }
    /**
     * Read numFrames from the recording into the buffer, if there is enough data.
     * Start at the cursor position, aligned up to the next frame.
     * @param buffer    destination, numFrames * channelCount floats
     * @param numFrames frames requested
     * @return number of frames actually read.
     */
    int32_t read(float *buffer, int32_t numFrames) {
        int32_t framesRead = 0;
        // Limit by request, capacity, and data actually available.
        int32_t framesLeft = std::min(numFrames,
                std::min(mMaxFrames, (int32_t)(mWriteCursorFrames - mReadCursorFrames)));
        while (framesLeft > 0) {
            int32_t indexFrame = getReadIndex();
            // contiguous reads up to the end of the circular buffer
            int32_t framesToEnd = mMaxFrames - indexFrame;
            int32_t framesNow = std::min(framesLeft, framesToEnd);
            int32_t numSamples = framesNow * mChannelCount;
            int32_t sampleIndex = indexFrame * mChannelCount;
            memcpy(buffer,
                   &mData[sampleIndex],
                   (numSamples * sizeof(float)));
            mReadCursorFrames += framesNow;
            framesLeft -= framesNow;
            framesRead += framesNow;
        }
        return framesRead;
    }
private:
    std::unique_ptr<float[]> mData;
    int64_t mReadCursorFrames = 0;
    int64_t mWriteCursorFrames = 0; // monotonically increasing
    const int32_t mChannelCount;
    const int32_t mMaxFrames;
};
#endif //NATIVEOBOE_MULTICHANNEL_RECORDING_H

View File

@ -0,0 +1,702 @@
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Set to 1 for debugging race condition #1180 with mAAudioStream.
// See also AudioStreamAAudio.cpp in Oboe.
// This was left in the code so that we could test the fix again easily in the future.
// We could not trigger the race condition without adding these get calls and the sleeps.
#define DEBUG_CLOSE_RACE 0
#include <fstream>
#include <iostream>
#if DEBUG_CLOSE_RACE
#include <thread>
#endif // DEBUG_CLOSE_RACE
#include <vector>
#include <common/AudioClock.h>
#include "util/WaveFileWriter.h"
#include "NativeAudioContext.h"
using namespace oboe;
// Map a Java-side NATIVE_MODE_* constant onto the oboe::AudioApi enum.
// Unrecognized values fall back to Unspecified.
static oboe::AudioApi convertNativeApiToAudioApi(int nativeApi) {
    if (nativeApi == NATIVE_MODE_AAUDIO) {
        return oboe::AudioApi::AAudio;
    }
    if (nativeApi == NATIVE_MODE_OPENSLES) {
        return oboe::AudioApi::OpenSLES;
    }
    return oboe::AudioApi::Unspecified;
}
// Collects WAV-file bytes in memory so they can be written to disk in one go.
class MyOboeOutputStream : public WaveFileOutputStream {
public:
    // Append one byte to the in-memory image.
    void write(uint8_t b) override {
        mBytes.push_back(b);
    }
    // Number of bytes accumulated so far.
    int32_t length() {
        return static_cast<int32_t>(mBytes.size());
    }
    // Raw pointer to the accumulated bytes (valid until next write()).
    uint8_t *getData() {
        return mBytes.data();
    }
private:
    std::vector<uint8_t> mBytes;
};
// Shared settings for all activities: whether streams use the callback API,
// and the requested frames-per-callback (0 means unspecified).
bool ActivityContext::mUseCallback = true;
int ActivityContext::callbackSize = 0;
// Return the first open stream whose direction is Output, or nullptr.
std::shared_ptr<oboe::AudioStream> ActivityContext::getOutputStream() {
    for (const auto &entry : mOboeStreams) {
        const std::shared_ptr<oboe::AudioStream> &stream = entry.second;
        if (stream->getDirection() == oboe::Direction::Output) {
            return stream;
        }
    }
    return nullptr;
}
std::shared_ptr<oboe::AudioStream> ActivityContext::getInputStream() {
for (auto entry : mOboeStreams) {
std::shared_ptr<oboe::AudioStream> oboeStream = entry.second;
if (oboeStream != nullptr) {
if (oboeStream->getDirection() == oboe::Direction::Input) {
return oboeStream;
}
}
}
return nullptr;
}
// Release our reference to the stream and remove its slot from the map.
void ActivityContext::freeStreamIndex(int32_t streamIndex) {
mOboeStreams[streamIndex].reset();
mOboeStreams.erase(streamIndex);
}
// Hand out a fresh stream handle; handles are never reused.
int32_t ActivityContext::allocateStreamIndex() {
return mNextStreamHandle++;
}
void ActivityContext::close(int32_t streamIndex) {
stopBlockingIOThread();
std::shared_ptr<oboe::AudioStream> oboeStream = getStream(streamIndex);
if (oboeStream != nullptr) {
oboeStream->close();
LOGD("ActivityContext::%s() delete stream %d ", __func__, streamIndex);
freeStreamIndex(streamIndex);
}
}
bool ActivityContext::isMMapUsed(int32_t streamIndex) {
std::shared_ptr<oboe::AudioStream> oboeStream = getStream(streamIndex);
if (oboeStream == nullptr) return false;
if (oboeStream->getAudioApi() != AudioApi::AAudio) return false;
return AAudioExtensions::getInstance().isMMapUsed(oboeStream.get());
}
// Pause every open stream. Only the result of the last requestPause()
// is returned (matching the original behavior).
oboe::Result ActivityContext::pause() {
    stopBlockingIOThread();
    oboe::Result lastResult = oboe::Result::OK;
    for (const auto &entry : mOboeStreams) {
        lastResult = entry.second->requestPause();
    }
    return lastResult;
}
// Stop every open stream. Only the result of the last requestStop()
// is returned (matching the original behavior).
oboe::Result ActivityContext::stopAllStreams() {
    stopBlockingIOThread();
    oboe::Result lastResult = oboe::Result::OK;
    for (const auto &entry : mOboeStreams) {
        lastResult = entry.second->requestStop();
    }
    return lastResult;
}
// Base hook for per-activity builder configuration; installs the callback
// proxy when callbacks are enabled. isInput is unused here but used by
// overrides in subclasses.
void ActivityContext::configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) {
// We needed the proxy because we did not know the channelCount when we setup the Builder.
if (mUseCallback) {
builder.setDataCallback(&oboeCallbackProxy);
}
}
// Open a stream with the given JNI parameters.
// Returns a non-negative stream index on success, or a negative
// oboe::Result error code on failure.
int ActivityContext::open(jint nativeApi,
jint sampleRate,
jint channelCount,
jint format,
jint sharingMode,
jint performanceMode,
jint inputPreset,
jint usage,
jint deviceId,
jint sessionId,
jint framesPerBurst,
jboolean channelConversionAllowed,
jboolean formatConversionAllowed,
jint rateConversionQuality,
jboolean isMMap,
jboolean isInput) {
// Validate the requested API before allocating anything.
oboe::AudioApi audioApi = oboe::AudioApi::Unspecified;
switch (nativeApi) {
case NATIVE_MODE_UNSPECIFIED:
case NATIVE_MODE_AAUDIO:
case NATIVE_MODE_OPENSLES:
audioApi = convertNativeApiToAudioApi(nativeApi);
break;
default:
return (jint) oboe::Result::ErrorOutOfRange;
}
int32_t streamIndex = allocateStreamIndex();
if (streamIndex < 0) {
LOGE("ActivityContext::open() stream array full");
return (jint) oboe::Result::ErrorNoFreeHandles;
}
// Sanity-check the channel count.
if (channelCount < 0 || channelCount > 256) {
LOGE("ActivityContext::open() channels out of range");
return (jint) oboe::Result::ErrorOutOfRange;
}
// Create an audio stream.
oboe::AudioStreamBuilder builder;
builder.setChannelCount(channelCount)
->setDirection(isInput ? oboe::Direction::Input : oboe::Direction::Output)
->setSharingMode((oboe::SharingMode) sharingMode)
->setPerformanceMode((oboe::PerformanceMode) performanceMode)
->setInputPreset((oboe::InputPreset)inputPreset)
->setUsage((oboe::Usage)usage)
->setDeviceId(deviceId)
->setSessionId((oboe::SessionId) sessionId)
->setSampleRate(sampleRate)
->setFormat((oboe::AudioFormat) format)
->setChannelConversionAllowed(channelConversionAllowed)
->setFormatConversionAllowed(formatConversionAllowed)
->setSampleRateConversionQuality((oboe::SampleRateConversionQuality) rateConversionQuality)
;
if (mUseCallback) {
builder.setFramesPerCallback(callbackSize);
}
// Let the concrete activity add its own builder settings.
configureBuilder(isInput, builder);
builder.setAudioApi(audioApi);
// Temporarily set the AAudio MMAP policy to disable MMAP or not.
bool oldMMapEnabled = AAudioExtensions::getInstance().isMMapEnabled();
AAudioExtensions::getInstance().setMMapEnabled(isMMap);
// Record time for opening.
if (isInput) {
mInputOpenedAt = oboe::AudioClock::getNanoseconds();
} else {
mOutputOpenedAt = oboe::AudioClock::getNanoseconds();
}
// Open a stream based on the builder settings.
std::shared_ptr<oboe::AudioStream> oboeStream;
Result result = builder.openStream(oboeStream);
// Restore the global MMAP policy regardless of success.
AAudioExtensions::getInstance().setMMapEnabled(oldMMapEnabled);
if (result != Result::OK) {
freeStreamIndex(streamIndex);
streamIndex = -1;
} else {
mOboeStreams[streamIndex] = oboeStream; // save shared_ptr
mChannelCount = oboeStream->getChannelCount(); // FIXME store per stream
mFramesPerBurst = oboeStream->getFramesPerBurst();
mSampleRate = oboeStream->getSampleRate();
createRecording();
finishOpen(isInput, oboeStream.get());
}
// Blocking (non-callback) IO needs a staging buffer for read/write.
if (!mUseCallback) {
int numSamples = getFramesPerBlock() * mChannelCount;
dataBuffer = std::make_unique<float[]>(numSamples);
}
return (result != Result::OK) ? (int)result : streamIndex;
}
// Start the open stream(s). If callbacks are disabled, also spawn the
// blocking-IO thread that drives reads/writes.
oboe::Result ActivityContext::start() {
oboe::Result result = oboe::Result::OK;
std::shared_ptr<oboe::AudioStream> inputStream = getInputStream();
std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
if (inputStream == nullptr && outputStream == nullptr) {
LOGD("%s() - no streams defined", __func__);
return oboe::Result::ErrorInvalidState; // not open
}
// Let the concrete activity build its signal chain before starting.
configureForStart();
audioStreamGateway.reset();
result = startStreams();
if (!mUseCallback && result == oboe::Result::OK) {
// Instead of using the callback, start a thread that writes the stream.
threadEnabled.store(true);
dataThread = new std::thread(threadCallback, this);
}
#if DEBUG_CLOSE_RACE
// Debug-only: hammer getFramesRead() from another thread to provoke the
// close race described at the top of this file (issue #1180).
// Also put a sleep for 400 msec in AudioStreamAAudio::updateFramesRead().
if (outputStream != nullptr) {
std::thread raceDebugger([outputStream]() {
while (outputStream->getState() != StreamState::Closed) {
int64_t framesRead = outputStream->getFramesRead();
LOGD("raceDebugger, framesRead = %d, state = %d",
(int) framesRead, (int) outputStream->getState());
}
});
raceDebugger.detach();
}
#endif // DEBUG_CLOSE_RACE
return result;
}
/**
 * Save the current recording as a 24-bit WAV file.
 *
 * @param filename destination path
 * @return number of bytes written, -1 if there is no recording,
 *         -2 if the recording is empty
 */
int32_t ActivityContext::saveWaveFile(const char *filename) {
    if (mRecording == nullptr) {
        LOGW("ActivityContext::saveWaveFile(%s) but no recording!", filename);
        return -1;
    }
    if (mRecording->getSizeInFrames() == 0) {
        LOGW("ActivityContext::saveWaveFile(%s) but no frames!", filename);
        return -2;
    }
    MyOboeOutputStream outStream;
    WaveFileWriter writer(&outStream);
    writer.setFrameRate(mSampleRate);
    writer.setSamplesPerFrame(mRecording->getChannelCount());
    writer.setBitsPerSample(24);
    // Use std::vector rather than a variable-length array: VLAs are a
    // non-standard compiler extension in C++.
    std::vector<float> buffer(mRecording->getChannelCount());
    // Read samples from start to finish.
    mRecording->rewind();
    for (int32_t frameIndex = 0; frameIndex < mRecording->getSizeInFrames(); frameIndex++) {
        mRecording->read(buffer.data(), 1 /* numFrames */);
        for (int32_t i = 0; i < mRecording->getChannelCount(); i++) {
            writer.write(buffer[i]);
        }
    }
    writer.close();
    // Flush the in-memory WAV image to disk.
    if (outStream.length() > 0) {
        auto myfile = std::ofstream(filename, std::ios::out | std::ios::binary);
        myfile.write((char *) outStream.getData(), outStream.length());
        myfile.close();
    }
    return outStream.length();
}
double ActivityContext::getTimestampLatency(int32_t streamIndex) {
std::shared_ptr<oboe::AudioStream> oboeStream = getStream(streamIndex);
if (oboeStream != nullptr) {
auto result = oboeStream->calculateLatencyMillis();
return (!result) ? -1.0 : result.value();
}
return -1.0;
}
// =================================================================== ActivityTestOutput
// Close the stream, then tear down the signal chain so the next
// configureForStart() builds a fresh one.
void ActivityTestOutput::close(int32_t streamIndex) {
ActivityContext::close(streamIndex);
manyToMulti.reset(nullptr);
monoToMulti.reset(nullptr);
mSinkFloat.reset();
mSinkI16.reset();
mSinkI24.reset();
mSinkI32.reset();
}
// Connect or disconnect one channel's signal source in the flowgraph.
// Which oscillator feeds the channel depends on the selected signal type.
void ActivityTestOutput::setChannelEnabled(int channelIndex, bool enabled) {
if (manyToMulti == nullptr) {
return; // signal chain not built yet
}
if (enabled) {
switch (mSignalType) {
case SignalType::Sine:
// Fixed-frequency sine: detach any sweep driving the frequency port.
sineOscillators[channelIndex].frequency.disconnect();
sineOscillators[channelIndex].output.connect(manyToMulti->inputs[channelIndex].get());
break;
case SignalType::Sawtooth:
sawtoothOscillators[channelIndex].output.connect(manyToMulti->inputs[channelIndex].get());
break;
case SignalType::FreqSweep:
// Linear sweep drives the sine frequency.
mLinearShape.output.connect(&sineOscillators[channelIndex].frequency);
sineOscillators[channelIndex].output.connect(manyToMulti->inputs[channelIndex].get());
break;
case SignalType::PitchSweep:
// Exponential sweep drives the sine frequency.
mExponentialShape.output.connect(&sineOscillators[channelIndex].frequency);
sineOscillators[channelIndex].output.connect(manyToMulti->inputs[channelIndex].get());
break;
default:
break;
}
} else {
manyToMulti->inputs[channelIndex]->disconnect();
}
}
// Build the output signal chain: per-channel oscillators -> many-to-multi
// converter -> one sink per supported sample format.
void ActivityTestOutput::configureForStart() {
manyToMulti = std::make_unique<ManyToMultiConverter>(mChannelCount);
mSinkFloat = std::make_shared<SinkFloat>(mChannelCount);
mSinkI16 = std::make_shared<SinkI16>(mChannelCount);
mSinkI24 = std::make_shared<SinkI24>(mChannelCount);
mSinkI32 = std::make_shared<SinkI32>(mChannelCount);
std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
// Low-frequency triangle drives both sweep shapes.
mTriangleOscillator.setSampleRate(outputStream->getSampleRate());
mTriangleOscillator.frequency.setValue(1.0/kSweepPeriod);
mTriangleOscillator.amplitude.setValue(1.0);
mTriangleOscillator.setPhase(-1.0);
mLinearShape.setMinimum(0.0);
mLinearShape.setMaximum(outputStream->getSampleRate() * 0.5); // Nyquist
mExponentialShape.setMinimum(110.0);
mExponentialShape.setMaximum(outputStream->getSampleRate() * 0.5); // Nyquist
mTriangleOscillator.output.connect(&(mLinearShape.input));
mTriangleOscillator.output.connect(&(mExponentialShape.input));
{
// Stagger the per-channel sine frequencies so channels are distinguishable.
double frequency = 330.0;
for (int i = 0; i < mChannelCount; i++) {
sineOscillators[i].setSampleRate(outputStream->getSampleRate());
sineOscillators[i].frequency.setValue(frequency);
frequency *= 4.0 / 3.0; // each sine is at a higher frequency
sineOscillators[i].amplitude.setValue(AMPLITUDE_SINE);
setChannelEnabled(i, true);
}
}
// Feed the converter into every sink; configureStreamGateway() picks one.
manyToMulti->output.connect(&(mSinkFloat.get()->input));
manyToMulti->output.connect(&(mSinkI16.get()->input));
manyToMulti->output.connect(&(mSinkI24.get()->input));
manyToMulti->output.connect(&(mSinkI32.get()->input));
mSinkFloat->pullReset();
mSinkI16->pullReset();
mSinkI24->pullReset();
mSinkI32->pullReset();
configureStreamGateway();
}
// Route the flowgraph to the sink matching the stream's sample format,
// then install the gateway as the data callback when callbacks are enabled.
void ActivityTestOutput::configureStreamGateway() {
    std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
    switch (outputStream->getFormat()) {
        case oboe::AudioFormat::I16:
            audioStreamGateway.setAudioSink(mSinkI16);
            break;
        case oboe::AudioFormat::I24:
            audioStreamGateway.setAudioSink(mSinkI24);
            break;
        case oboe::AudioFormat::I32:
            audioStreamGateway.setAudioSink(mSinkI32);
            break;
        case oboe::AudioFormat::Float:
            audioStreamGateway.setAudioSink(mSinkFloat);
            break;
        default:
            // Other formats leave the previously set sink in place, as before.
            break;
    }
    if (mUseCallback) {
        oboeCallbackProxy.setCallback(&audioStreamGateway);
    }
}
// Body of the blocking-IO thread: repeatedly pull audio from the gateway
// and write it to the output stream until stopped or an error occurs.
void ActivityTestOutput::runBlockingIO() {
int32_t framesPerBlock = getFramesPerBlock();
oboe::DataCallbackResult callbackResult = oboe::DataCallbackResult::Continue;
std::shared_ptr<oboe::AudioStream> oboeStream = getOutputStream();
if (oboeStream == nullptr) {
LOGE("%s() : no stream found\n", __func__);
return;
}
while (threadEnabled.load()
&& callbackResult == oboe::DataCallbackResult::Continue) {
// generate output by calling the callback
callbackResult = audioStreamGateway.onAudioReady(oboeStream.get(),
dataBuffer.get(),
framesPerBlock);
// Blocking write with a one-second timeout.
auto result = oboeStream->write(dataBuffer.get(),
framesPerBlock,
NANOS_PER_SECOND);
if (!result) {
LOGE("%s() returned %s\n", __func__, convertToText(result.error()));
break;
}
int32_t framesWritten = result.value();
// A short write indicates a timeout; give up rather than glitch.
if (framesWritten < framesPerBlock) {
LOGE("%s() : write() wrote %d of %d\n", __func__, framesWritten, framesPerBlock);
break;
}
}
}
// ======================================================================= ActivityTestInput
// Prepare the input analyzer; when callbacks are enabled the analyzer
// itself is installed as the data callback.
void ActivityTestInput::configureForStart() {
mInputAnalyzer.reset();
if (mUseCallback) {
oboeCallbackProxy.setCallback(&mInputAnalyzer);
}
mInputAnalyzer.setRecording(mRecording.get());
}
// Body of the blocking-IO thread for input: wait for data, read a block,
// then pass it through the analyzer, until stopped or an error occurs.
void ActivityTestInput::runBlockingIO() {
int32_t framesPerBlock = getFramesPerBlock();
oboe::DataCallbackResult callbackResult = oboe::DataCallbackResult::Continue;
std::shared_ptr<oboe::AudioStream> oboeStream = getInputStream();
if (oboeStream == nullptr) {
LOGE("%s() : no stream found\n", __func__);
return;
}
while (threadEnabled.load()
&& callbackResult == oboe::DataCallbackResult::Continue) {
// Avoid glitches by waiting until there is extra data in the FIFO.
auto err = oboeStream->waitForAvailableFrames(mMinimumFramesBeforeRead, kNanosPerSecond);
if (!err) break;
// read from input, blocking up to one second
auto result = oboeStream->read(dataBuffer.get(),
framesPerBlock,
NANOS_PER_SECOND);
if (!result) {
LOGE("%s() : read() returned %s\n", __func__, convertToText(result.error()));
break;
}
int32_t framesRead = result.value();
if (framesRead < framesPerBlock) { // timeout?
LOGE("%s() : read() read %d of %d\n", __func__, framesRead, framesPerBlock);
break;
}
// analyze input
callbackResult = mInputAnalyzer.onAudioReady(oboeStream.get(),
dataBuffer.get(),
framesRead);
}
}
// Stop and tear down the playback stream created by startPlayback().
// NOTE(review): playbackStream is a raw pointer managed with delete here;
// this matches the legacy openStream(AudioStream**) API used below.
oboe::Result ActivityRecording::stopPlayback() {
oboe::Result result = oboe::Result::OK;
if (playbackStream != nullptr) {
result = playbackStream->requestStop();
playbackStream->close();
mPlayRecordingCallback.setRecording(nullptr);
delete playbackStream;
playbackStream = nullptr;
}
return result;
}
// Open a float output stream and play back the captured recording
// through mPlayRecordingCallback.
oboe::Result ActivityRecording::startPlayback() {
stop();
oboe::AudioStreamBuilder builder;
builder.setChannelCount(mChannelCount)
->setSampleRate(mSampleRate)
->setFormat(oboe::AudioFormat::Float)
->setCallback(&mPlayRecordingCallback)
->setAudioApi(oboe::AudioApi::OpenSLES);
// Legacy raw-pointer open; stream is freed in stopPlayback().
oboe::Result result = builder.openStream(&playbackStream);
if (result != oboe::Result::OK) {
delete playbackStream;
playbackStream = nullptr;
} else if (playbackStream != nullptr) {
if (mRecording != nullptr) {
// Play from the beginning of the recording.
mRecording->rewind();
mPlayRecordingCallback.setRecording(mRecording.get());
result = playbackStream->requestStart();
}
}
return result;
}
// ======================================================================= ActivityTapToTone
// Build the tap-to-tone chain: saw-ping generator -> mono-to-multi
// converter -> one sink per supported sample format.
void ActivityTapToTone::configureForStart() {
monoToMulti = std::make_unique<MonoToMultiConverter>(mChannelCount);
mSinkFloat = std::make_shared<SinkFloat>(mChannelCount);
mSinkI16 = std::make_shared<SinkI16>(mChannelCount);
mSinkI24 = std::make_shared<SinkI24>(mChannelCount);
mSinkI32 = std::make_shared<SinkI32>(mChannelCount);
std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
sawPingGenerator.setSampleRate(outputStream->getSampleRate());
sawPingGenerator.frequency.setValue(FREQUENCY_SAW_PING);
sawPingGenerator.amplitude.setValue(AMPLITUDE_SAW_PING);
sawPingGenerator.output.connect(&(monoToMulti->input));
// Feed every sink; configureStreamGateway() picks the one that matches
// the stream format.
monoToMulti->output.connect(&(mSinkFloat.get()->input));
monoToMulti->output.connect(&(mSinkI16.get()->input));
monoToMulti->output.connect(&(mSinkI24.get()->input));
monoToMulti->output.connect(&(mSinkI32.get()->input));
mSinkFloat->pullReset();
mSinkI16->pullReset();
mSinkI24->pullReset();
mSinkI32->pullReset();
configureStreamGateway();
}
// ======================================================================= ActivityRoundTripLatency
// For input streams, size the buffer capacity relative to the output
// stream's burst size. Output streams need no extra configuration here.
void ActivityFullDuplex::configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) {
    if (!isInput) {
        return;
    }
    // Ideally the output streams should be opened first.
    std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
    if (outputStream != nullptr) {
        // Make sure the capacity is bigger than two bursts.
        builder.setBufferCapacityInFrames(2 * outputStream->getFramesPerBurst());
    }
}
// ======================================================================= ActivityEcho
// Lazily create the echo processor and wire it to the output callback.
void ActivityEcho::configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) {
    ActivityFullDuplex::configureBuilder(isInput, builder);
    if (mFullDuplexEcho == nullptr) {
        mFullDuplexEcho = std::make_unique<FullDuplexEcho>();
    }
    if (isInput) {
        return; // only output uses a callback, input is polled
    }
    builder.setCallback((oboe::AudioStreamCallback *) &oboeCallbackProxy);
    oboeCallbackProxy.setCallback(mFullDuplexEcho.get());
}
// Hand the newly opened stream to the echo engine on the matching side.
void ActivityEcho::finishOpen(bool isInput, oboe::AudioStream *oboeStream) {
    if (!isInput) {
        mFullDuplexEcho->setOutputStream(oboeStream);
    } else {
        mFullDuplexEcho->setInputStream(oboeStream);
    }
}
// ======================================================================= ActivityRoundTripLatency
// Lazily create the latency analyzer wrapper and attach it to the output
// stream's callback path.
void ActivityRoundTripLatency::configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) {
    ActivityFullDuplex::configureBuilder(isInput, builder);

    if (!mFullDuplexLatency) {
        mFullDuplexLatency = std::make_unique<FullDuplexAnalyzer>(&mEchoAnalyzer);
    }
    // only output uses a callback, input is polled
    if (!isInput) {
        builder.setCallback((oboe::AudioStreamCallback *) &oboeCallbackProxy);
        oboeCallbackProxy.setCallback(mFullDuplexLatency.get());
    }
}
// Attach the opened stream to the analyzer; the input side also records
// what it captures for later inspection.
void ActivityRoundTripLatency::finishOpen(bool isInput, AudioStream *oboeStream) {
    if (!isInput) {
        mFullDuplexLatency->setOutputStream(oboeStream);
        return;
    }
    mFullDuplexLatency->setInputStream(oboeStream);
    mFullDuplexLatency->setRecording(mRecording.get());
}
// ======================================================================= ActivityGlitches
// Lazily create the glitch analyzer wrapper and attach it to the output
// stream's callback path.
void ActivityGlitches::configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) {
    ActivityFullDuplex::configureBuilder(isInput, builder);

    if (!mFullDuplexGlitches) {
        mFullDuplexGlitches = std::make_unique<FullDuplexAnalyzer>(&mGlitchAnalyzer);
    }
    // only output uses a callback, input is polled
    if (!isInput) {
        builder.setCallback((oboe::AudioStreamCallback *) &oboeCallbackProxy);
        oboeCallbackProxy.setCallback(mFullDuplexGlitches.get());
    }
}
// Attach the opened stream to the analyzer; the input side also records
// what it captures for later inspection.
void ActivityGlitches::finishOpen(bool isInput, oboe::AudioStream *oboeStream) {
    if (!isInput) {
        mFullDuplexGlitches->setOutputStream(oboeStream);
        return;
    }
    mFullDuplexGlitches->setInputStream(oboeStream);
    mFullDuplexGlitches->setRecording(mRecording.get());
}
// ======================================================================= ActivityDataPath
// Lazily create the data-path analyzer wrapper and attach it to the output
// stream's callback path.
void ActivityDataPath::configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) {
    ActivityFullDuplex::configureBuilder(isInput, builder);

    if (!mFullDuplexDataPath) {
        mFullDuplexDataPath = std::make_unique<FullDuplexAnalyzer>(&mDataPathAnalyzer);
    }
    // only output uses a callback, input is polled
    if (!isInput) {
        builder.setCallback((oboe::AudioStreamCallback *) &oboeCallbackProxy);
        oboeCallbackProxy.setCallback(mFullDuplexDataPath.get());
    }
}
// Attach the opened stream to the analyzer; the input side also records
// what it captures for later inspection.
void ActivityDataPath::finishOpen(bool isInput, oboe::AudioStream *oboeStream) {
    if (!isInput) {
        mFullDuplexDataPath->setOutputStream(oboeStream);
        return;
    }
    mFullDuplexDataPath->setInputStream(oboeStream);
    mFullDuplexDataPath->setRecording(mRecording.get());
}
// =================================================================== ActivityTestDisconnect
// Close the stream, then release the sink so the flow graph can be rebuilt
// on the next configureForStart().
void ActivityTestDisconnect::close(int32_t streamIndex) {
    ActivityContext::close(streamIndex);
    mSinkFloat.reset();
}
// Build a sine -> mono-to-multi -> float-sink graph when an output stream is
// open; for input-only tests the gateway runs with no sink attached.
void ActivityTestDisconnect::configureForStart() {
    std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
    std::shared_ptr<oboe::AudioStream> inputStream = getInputStream();

    if (outputStream) {
        mSinkFloat = std::make_unique<SinkFloat>(mChannelCount);
        monoToMulti = std::make_unique<MonoToMultiConverter>(mChannelCount);

        // Steady 440 Hz tone so a disconnect is audible immediately.
        sineOscillator = std::make_unique<SineOscillator>();
        sineOscillator->setSampleRate(outputStream->getSampleRate());
        sineOscillator->frequency.setValue(440.0);
        sineOscillator->amplitude.setValue(AMPLITUDE_SINE);

        sineOscillator->output.connect(&monoToMulti->input);
        monoToMulti->output.connect(&mSinkFloat->input);
        mSinkFloat->pullReset();

        audioStreamGateway.setAudioSink(mSinkFloat);
    } else if (inputStream) {
        audioStreamGateway.setAudioSink(nullptr);
    }
    oboeCallbackProxy.setCallback(&audioStreamGateway);
}

View File

@ -0,0 +1,767 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_NATIVEAUDIOCONTEXT_H
#define NATIVEOBOE_NATIVEAUDIOCONTEXT_H
#include <jni.h>
#include <sys/system_properties.h>
#include <thread>
#include <unordered_map>
#include <vector>
#include "common/OboeDebug.h"
#include "oboe/Oboe.h"
#include "aaudio/AAudioExtensions.h"
#include "AudioStreamGateway.h"
#include "flowunits/ImpulseOscillator.h"
#include "flowgraph/ManyToMultiConverter.h"
#include "flowgraph/MonoToMultiConverter.h"
#include "flowgraph/SinkFloat.h"
#include "flowgraph/SinkI16.h"
#include "flowgraph/SinkI24.h"
#include "flowgraph/SinkI32.h"
#include "flowunits/ExponentialShape.h"
#include "flowunits/LinearShape.h"
#include "flowunits/SineOscillator.h"
#include "flowunits/SawtoothOscillator.h"
#include "flowunits/TriangleOscillator.h"
#include "FullDuplexAnalyzer.h"
#include "FullDuplexEcho.h"
#include "FullDuplexStream.h"
#include "analyzer/GlitchAnalyzer.h"
#include "analyzer/DataPathAnalyzer.h"
#include "InputStreamCallbackAnalyzer.h"
#include "MultiChannelRecording.h"
#include "OboeStreamCallbackProxy.h"
#include "PlayRecordingCallback.h"
#include "SawPingGenerator.h"
// These must match order in strings.xml and in StreamConfiguration.java
#define NATIVE_MODE_UNSPECIFIED 0
#define NATIVE_MODE_OPENSLES 1
#define NATIVE_MODE_AAUDIO 2
#define MAX_SINE_OSCILLATORS 8
#define AMPLITUDE_SINE 1.0
#define AMPLITUDE_SAWTOOTH 0.5
#define FREQUENCY_SAW_PING 800.0
#define AMPLITUDE_SAW_PING 0.8
#define AMPLITUDE_IMPULSE 0.7
#define NANOS_PER_MICROSECOND ((int64_t) 1000)
#define NANOS_PER_MILLISECOND (1000 * NANOS_PER_MICROSECOND)
#define NANOS_PER_SECOND (1000 * NANOS_PER_MILLISECOND)
#define SECONDS_TO_RECORD 10
/**
* Abstract base class that corresponds to a test at the Java level.
*/
class ActivityContext {
public:
    ActivityContext() {}
    virtual ~ActivityContext() = default;

    // Return the stream associated with a handle returned by open(),
    // or nullptr if the handle is unknown.
    std::shared_ptr<oboe::AudioStream> getStream(int32_t streamIndex) {
        auto it = mOboeStreams.find(streamIndex);
        if (it != mOboeStreams.end()) {
            return it->second;
        } else {
            return nullptr;
        }
    }

    virtual void configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder);

    /**
     * Open a stream with the given parameters.
     * @param nativeApi
     * @param sampleRate
     * @param channelCount
     * @param format
     * @param sharingMode
     * @param performanceMode
     * @param inputPreset
     * @param usage
     * @param deviceId
     * @param sessionId
     * @param framesPerBurst
     * @param channelConversionAllowed
     * @param formatConversionAllowed
     * @param rateConversionQuality
     * @param isMMap
     * @param isInput
     * @return stream ID
     */
    int open(jint nativeApi,
             jint sampleRate,
             jint channelCount,
             jint format,
             jint sharingMode,
             jint performanceMode,
             jint inputPreset,
             jint usage,
             jint deviceId,
             jint sessionId,
             jint framesPerBurst,
             jboolean channelConversionAllowed,
             jboolean formatConversionAllowed,
             jint rateConversionQuality,
             jboolean isMMap,
             jboolean isInput);

    virtual void close(int32_t streamIndex);

    virtual void configureForStart() {}

    oboe::Result start();

    oboe::Result pause();

    oboe::Result stopAllStreams();

    virtual oboe::Result stop() {
        return stopAllStreams();
    }

    // Smoothed CPU load estimate from the callback proxy, 0.0 .. ~1.0.
    double getCpuLoad() {
        return oboeCallbackProxy.getCpuLoad();
    }

    // Artificial workload used to stress the audio callback.
    void setWorkload(double workload) {
        oboeCallbackProxy.setWorkload(workload);
    }

    virtual oboe::Result startPlayback() {
        return oboe::Result::OK;
    }

    virtual oboe::Result stopPlayback() {
        return oboe::Result::OK;
    }

    // Blocking read/write loop used when callbacks are disabled.
    virtual void runBlockingIO() {}

    static void threadCallback(ActivityContext *context) {
        context->runBlockingIO();
    }

    void stopBlockingIOThread() {
        if (dataThread != nullptr) {
            // stop a thread that runs in place of the callback
            threadEnabled.store(false); // ask thread to exit its loop
            dataThread->join();
            dataThread = nullptr;
        }
    }

    virtual double getPeakLevel(int index) {
        return 0.0;
    }

    // Current time in nanoseconds, or a negative value if clock_gettime fails.
    static int64_t getNanoseconds(clockid_t clockId = CLOCK_MONOTONIC) {
        struct timespec time;
        int result = clock_gettime(clockId, &time);
        if (result < 0) {
            return result;
        }
        return (time.tv_sec * NANOS_PER_SECOND) + time.tv_nsec;
    }

    // Calculate time between beginning and when frame[0] occurred.
    int32_t calculateColdStartLatencyMillis(int32_t sampleRate,
                                            int64_t beginTimeNanos,
                                            int64_t timeStampPosition,
                                            int64_t timestampNanos) const {
        int64_t elapsedNanos = NANOS_PER_SECOND * (timeStampPosition / (double) sampleRate);
        int64_t timeOfFrameZero = timestampNanos - elapsedNanos;
        int64_t coldStartLatencyNanos = timeOfFrameZero - beginTimeNanos;
        return coldStartLatencyNanos / NANOS_PER_MILLISECOND;
    }

    // Cold-start latency of the input stream in ms, or -1 if not measurable yet.
    int32_t getColdStartInputMillis() {
        std::shared_ptr<oboe::AudioStream> oboeStream = getInputStream();
        if (oboeStream != nullptr) {
            int64_t framesRead = oboeStream->getFramesRead();
            if (framesRead > 0) {
                // Base latency on the time that frame[0] would have been received by the app.
                int64_t nowNanos = getNanoseconds();
                return calculateColdStartLatencyMillis(oboeStream->getSampleRate(),
                                                       mInputOpenedAt,
                                                       framesRead,
                                                       nowNanos);
            }
        }
        return -1;
    }

    // Cold-start latency of the output stream in ms, or -1 if not measurable yet.
    int32_t getColdStartOutputMillis() {
        std::shared_ptr<oboe::AudioStream> oboeStream = getOutputStream();
        if (oboeStream != nullptr) {
            auto result = oboeStream->getTimestamp(CLOCK_MONOTONIC);
            if (result) {
                auto frameTimestamp = result.value();
                // Calculate the time that frame[0] would have been played by the speaker.
                int64_t position = frameTimestamp.position;
                int64_t timestampNanos = frameTimestamp.timestamp;
                return calculateColdStartLatencyMillis(oboeStream->getSampleRate(),
                                                       mOutputOpenedAt,
                                                       position,
                                                       timestampNanos);
            }
        }
        return -1;
    }

    /**
     * Trigger a sound or impulse.
     */
    virtual void trigger() {}

    bool isMMapUsed(int32_t streamIndex);

    int32_t getFramesPerBlock() {
        return (callbackSize == 0) ? mFramesPerBurst : callbackSize;
    }

    int64_t getCallbackCount() {
        return oboeCallbackProxy.getCallbackCount();
    }

    // Result of the most recent error callback, or ErrorNull when no stream exists.
    oboe::Result getLastErrorCallbackResult() {
        std::shared_ptr<oboe::AudioStream> stream = getOutputStream();
        if (stream == nullptr) {
            stream = getInputStream();
        }
        // FIX: the condition was inverted. It returned ErrorNull when a valid
        // stream existed and dereferenced a null stream when none did.
        return stream ? stream->getLastErrorCallbackResult() : oboe::Result::ErrorNull;
    }

    int32_t getFramesPerCallback() {
        return oboeCallbackProxy.getFramesPerCallback();
    }

    virtual void setChannelEnabled(int channelIndex, bool enabled) {}

    virtual void setSignalType(int signalType) {}

    virtual int32_t saveWaveFile(const char *filename);

    virtual void setMinimumFramesBeforeRead(int32_t numFrames) {}

    static bool mUseCallback;
    static int callbackSize;

    double getTimestampLatency(int32_t streamIndex);

protected:
    std::shared_ptr<oboe::AudioStream> getInputStream();
    std::shared_ptr<oboe::AudioStream> getOutputStream();
    int32_t allocateStreamIndex();
    void freeStreamIndex(int32_t streamIndex);

    virtual void createRecording() {
        mRecording = std::make_unique<MultiChannelRecording>(mChannelCount,
                                                             SECONDS_TO_RECORD * mSampleRate);
    }

    virtual void finishOpen(bool isInput, oboe::AudioStream *oboeStream) {}

    virtual oboe::Result startStreams() = 0;

    std::unique_ptr<float []> dataBuffer{};

    AudioStreamGateway audioStreamGateway;
    OboeStreamCallbackProxy oboeCallbackProxy;

    std::unique_ptr<MultiChannelRecording> mRecording{};

    int32_t mNextStreamHandle = 0;
    std::unordered_map<int32_t, std::shared_ptr<oboe::AudioStream>> mOboeStreams;
    int32_t mFramesPerBurst = 0; // TODO per stream
    int32_t mChannelCount = 0; // TODO per stream
    int32_t mSampleRate = 0; // TODO per stream

    std::atomic<bool> threadEnabled{false};
    std::thread *dataThread = nullptr; // FIXME never gets deleted

private:
    int64_t mInputOpenedAt = 0;
    int64_t mOutputOpenedAt = 0;
};
/**
* Test a single input stream.
*/
// Records from a single input stream and feeds the data to an analyzer
// that measures per-channel peak levels.
class ActivityTestInput : public ActivityContext {
public:
    ActivityTestInput() {}
    virtual ~ActivityTestInput() = default;

    void configureForStart() override;

    // Peak level of one channel as measured by the input analyzer.
    double getPeakLevel(int index) override {
        return mInputAnalyzer.getPeakLevel(index);
    }

    void runBlockingIO() override;

    // Defer analysis until at least numFrames have been captured.
    void setMinimumFramesBeforeRead(int32_t numFrames) override {
        mInputAnalyzer.setMinimumFramesBeforeRead(numFrames);
        mMinimumFramesBeforeRead = numFrames;
    }

    int32_t getMinimumFramesBeforeRead() const {
        return mMinimumFramesBeforeRead;
    }

protected:
    // Reset the analyzer, match it to the stream's burst/channels/format,
    // then start the input stream.
    oboe::Result startStreams() override {
        mInputAnalyzer.reset();
        mInputAnalyzer.setup(getInputStream()->getFramesPerBurst(),
                             getInputStream()->getChannelCount(),
                             getInputStream()->getFormat());
        return getInputStream()->requestStart();
    }

    InputStreamCallbackAnalyzer mInputAnalyzer;
    int32_t mMinimumFramesBeforeRead = 0;
};
/**
* Record a configured input stream and play it back some simple way.
*/
// Record a configured input stream and play it back through a separate
// playback stream.
class ActivityRecording : public ActivityTestInput {
public:
    ActivityRecording() {}
    virtual ~ActivityRecording() = default;

    // Stop playback first, then the audio streams; report the first failure.
    oboe::Result stop() override {
        oboe::Result resultStopPlayback = stopPlayback();
        oboe::Result resultStopAudio = ActivityContext::stop();
        return (resultStopPlayback != oboe::Result::OK) ? resultStopPlayback : resultStopAudio;
    }

    oboe::Result startPlayback() override;

    oboe::Result stopPlayback() override;

    PlayRecordingCallback mPlayRecordingCallback;
    // Raw pointer; lifetime managed elsewhere — see startPlayback()/stopPlayback().
    oboe::AudioStream *playbackStream = nullptr;
};
/**
* Test a single output stream.
*/
// Plays a selectable test signal (sine, sawtooth, sweeps, noise) on a
// single output stream via a flow graph with one sink per sample format.
class ActivityTestOutput : public ActivityContext {
public:
    ActivityTestOutput()
            : sineOscillators(MAX_SINE_OSCILLATORS)
            , sawtoothOscillators(MAX_SINE_OSCILLATORS) {}

    virtual ~ActivityTestOutput() = default;

    void close(int32_t streamIndex) override;

    oboe::Result startStreams() override {
        return getOutputStream()->start();
    }

    void configureForStart() override;

    virtual void configureStreamGateway();

    void runBlockingIO() override;

    void setChannelEnabled(int channelIndex, bool enabled) override;

    // WARNING - must match order in strings.xml and OboeAudioOutputStream.java
    enum SignalType {
        Sine = 0,
        Sawtooth = 1,
        FreqSweep = 2,
        PitchSweep = 3,
        WhiteNoise = 4
    };

    void setSignalType(int signalType) override {
        mSignalType = (SignalType) signalType;
    }

protected:
    SignalType mSignalType = SignalType::Sine;

    std::vector<SineOscillator> sineOscillators;
    std::vector<SawtoothOscillator> sawtoothOscillators;
    static constexpr float kSweepPeriod = 10.0; // for triangle up and down

    // A triangle LFO is shaped into either a linear or an exponential range.
    TriangleOscillator mTriangleOscillator;
    LinearShape mLinearShape;
    ExponentialShape mExponentialShape;

    std::unique_ptr<ManyToMultiConverter> manyToMulti;
    std::unique_ptr<MonoToMultiConverter> monoToMulti;
    // One sink per output format; the active one matches the open stream.
    std::shared_ptr<oboe::flowgraph::SinkFloat> mSinkFloat;
    std::shared_ptr<oboe::flowgraph::SinkI16> mSinkI16;
    std::shared_ptr<oboe::flowgraph::SinkI24> mSinkI24;
    std::shared_ptr<oboe::flowgraph::SinkI32> mSinkI32;
};
/**
* Generate a short beep with a very short attack.
* This is used by Java to measure output latency.
*/
/**
 * Generate a short beep with a very short attack.
 * This is used by Java to measure output latency.
 */
class ActivityTapToTone : public ActivityTestOutput {
public:
    ActivityTapToTone() {}
    virtual ~ActivityTapToTone() = default;

    void configureForStart() override;

    // Fire one saw-ping; called from the UI when the user taps.
    virtual void trigger() override {
        sawPingGenerator.trigger();
    }

    SawPingGenerator sawPingGenerator;
};
/**
* Activity that uses synchronized input/output streams.
*/
/**
 * Base for activities that use synchronized input/output streams driven
 * by a FullDuplexAnalyzer subclass.
 */
class ActivityFullDuplex : public ActivityContext {
public:
    void configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) override;

    // Analyzer state/result codes; -1 means "not applicable" in this base class.
    virtual int32_t getState() { return -1; }
    virtual int32_t getResult() { return -1; }
    virtual bool isAnalyzerDone() { return false; }

    void setMinimumFramesBeforeRead(int32_t numFrames) override {
        getFullDuplexAnalyzer()->setMinimumFramesBeforeRead(numFrames);
    }

    // Subclasses expose their concrete analyzer through this accessor.
    virtual FullDuplexAnalyzer *getFullDuplexAnalyzer() = 0;

    int32_t getResetCount() {
        return getFullDuplexAnalyzer()->getLoopbackProcessor()->getResetCount();
    }

protected:
    // Duplex tests record two channels: output and input side by side.
    void createRecording() override {
        mRecording = std::make_unique<MultiChannelRecording>(2, // output and input
                                                             SECONDS_TO_RECORD * mSampleRate);
    }
};
/**
* Echo input to output through a delay line.
*/
/**
 * Echo input to output through a delay line.
 */
class ActivityEcho : public ActivityFullDuplex {
public:
    oboe::Result startStreams() override {
        return mFullDuplexEcho->start();
    }

    void configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) override;

    // Delay in seconds; silently ignored before the engine is created.
    void setDelayTime(double delayTimeSeconds) {
        if (mFullDuplexEcho) {
            mFullDuplexEcho->setDelayTime(delayTimeSeconds);
        }
    }

    FullDuplexAnalyzer *getFullDuplexAnalyzer() override {
        return (FullDuplexAnalyzer *) mFullDuplexEcho.get();
    }

protected:
    void finishOpen(bool isInput, oboe::AudioStream *oboeStream) override;

private:
    std::unique_ptr<FullDuplexEcho> mFullDuplexEcho{};
};
/**
* Measure Round Trip Latency
*/
/**
 * Measure Round Trip Latency using a pulse-based loopback analyzer.
 */
class ActivityRoundTripLatency : public ActivityFullDuplex {
public:
    oboe::Result startStreams() override {
        mAnalyzerLaunched = false;
        return mFullDuplexLatency->start();
    }

    void configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) override;

    LatencyAnalyzer *getLatencyAnalyzer() {
        return &mEchoAnalyzer;
    }

    int32_t getState() override {
        return getLatencyAnalyzer()->getState();
    }

    int32_t getResult() override {
        return getLatencyAnalyzer()->getState(); // TODO This does not look right.
    }

    // Launch the background analysis once, as soon as enough audio has been
    // captured, then report whether the analyzer has finished.
    bool isAnalyzerDone() override {
        if (!mAnalyzerLaunched) {
            mAnalyzerLaunched = launchAnalysisIfReady();
        }
        return mEchoAnalyzer.isDone();
    }

    FullDuplexAnalyzer *getFullDuplexAnalyzer() override {
        return (FullDuplexAnalyzer *) mFullDuplexLatency.get();
    }

    // Thread entry point for the number crunching.
    static void analyzeData(PulseLatencyAnalyzer *analyzer) {
        analyzer->analyze();
    }

    bool launchAnalysisIfReady() {
        // Are we ready to do the analysis?
        if (mEchoAnalyzer.hasEnoughData()) {
            // Crunch the numbers on a separate thread.
            std::thread t(analyzeData, &mEchoAnalyzer);
            t.detach();
            return true;
        }
        return false;
    }

protected:
    void finishOpen(bool isInput, oboe::AudioStream *oboeStream) override;

private:
    std::unique_ptr<FullDuplexAnalyzer> mFullDuplexLatency{};
    PulseLatencyAnalyzer mEchoAnalyzer;
    bool mAnalyzerLaunched = false;
};
/**
* Measure Glitches
*/
/**
 * Measure Glitches in a loopback signal.
 */
class ActivityGlitches : public ActivityFullDuplex {
public:
    oboe::Result startStreams() override {
        return mFullDuplexGlitches->start();
    }

    void configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) override;

    GlitchAnalyzer *getGlitchAnalyzer() {
        return &mGlitchAnalyzer;
    }

    int32_t getState() override {
        return getGlitchAnalyzer()->getState();
    }

    int32_t getResult() override {
        return getGlitchAnalyzer()->getResult();
    }

    bool isAnalyzerDone() override {
        return mGlitchAnalyzer.isDone();
    }

    FullDuplexAnalyzer *getFullDuplexAnalyzer() override {
        return (FullDuplexAnalyzer *) mFullDuplexGlitches.get();
    }

protected:
    void finishOpen(bool isInput, oboe::AudioStream *oboeStream) override;

private:
    std::unique_ptr<FullDuplexAnalyzer> mFullDuplexGlitches{};
    GlitchAnalyzer mGlitchAnalyzer;
};
/**
* Measure Data Path
*/
/**
 * Measure whether the audio data path is intact.
 */
class ActivityDataPath : public ActivityFullDuplex {
public:
    oboe::Result startStreams() override {
        return mFullDuplexDataPath->start();
    }

    void configureBuilder(bool isInput, oboe::AudioStreamBuilder &builder) override;

    // Enlarge the output buffer to half its capacity (at least two bursts)
    // to minimize underruns during the test.
    void configureForStart() override {
        std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();

        int32_t capacityInFrames = outputStream->getBufferCapacityInFrames();
        int32_t burstInFrames = outputStream->getFramesPerBurst();
        int32_t capacityInBursts = capacityInFrames / burstInFrames;
        int32_t sizeInBursts = std::max(2, capacityInBursts / 2);
        // Set size of buffer to minimize underruns.
        auto result = outputStream->setBufferSizeInFrames(sizeInBursts * burstInFrames);
        static_cast<void>(result); // Avoid unused variable.
        // NOTE(review): result.value() is logged even when the call failed —
        // confirm value() is meaningful on error for ResultWithValue.
        LOGD("ActivityDataPath: %s() capacity = %d, burst = %d, size = %d",
             __func__, capacityInFrames, burstInFrames, result.value());
    }

    DataPathAnalyzer *getDataPathAnalyzer() {
        return &mDataPathAnalyzer;
    }

    FullDuplexAnalyzer *getFullDuplexAnalyzer() override {
        return (FullDuplexAnalyzer *) mFullDuplexDataPath.get();
    }

protected:
    void finishOpen(bool isInput, oboe::AudioStream *oboeStream) override;

private:
    std::unique_ptr<FullDuplexAnalyzer> mFullDuplexDataPath{};
    DataPathAnalyzer mDataPathAnalyzer;
};
/**
* Test a single output stream.
*/
/**
 * Play a steady tone or record silently so the user can observe what
 * happens when the device is disconnected.
 */
class ActivityTestDisconnect : public ActivityContext {
public:
    ActivityTestDisconnect() {}

    virtual ~ActivityTestDisconnect() = default;

    void close(int32_t streamIndex) override;

    // Start whichever stream was opened: output takes precedence over input;
    // ErrorNull when neither exists.
    oboe::Result startStreams() override {
        std::shared_ptr<oboe::AudioStream> outputStream = getOutputStream();
        if (outputStream) {
            return outputStream->start();
        }

        std::shared_ptr<oboe::AudioStream> inputStream = getInputStream();
        if (inputStream) {
            return inputStream->start();
        }
        return oboe::Result::ErrorNull;
    }

    void configureForStart() override;

private:
    std::unique_ptr<SineOscillator> sineOscillator;
    std::unique_ptr<MonoToMultiConverter> monoToMulti;
    std::shared_ptr<oboe::flowgraph::SinkFloat> mSinkFloat;
};
/**
* Switch between various
*/
/**
 * Owns one ActivityContext per test type and switches the active one
 * to match the Java-side activity.
 */
class NativeAudioContext {
public:
    ActivityContext *getCurrentActivity() {
        return currentActivity;
    };

    // Select the active test; unknown values fall back to TestOutput.
    void setActivityType(int activityType) {
        mActivityType = (ActivityType) activityType;
        switch(mActivityType) {
            default:
            case ActivityType::Undefined:
            case ActivityType::TestOutput:
                currentActivity = &mActivityTestOutput;
                break;
            case ActivityType::TestInput:
                currentActivity = &mActivityTestInput;
                break;
            case ActivityType::TapToTone:
                currentActivity = &mActivityTapToTone;
                break;
            case ActivityType::RecordPlay:
                currentActivity = &mActivityRecording;
                break;
            case ActivityType::Echo:
                currentActivity = &mActivityEcho;
                break;
            case ActivityType::RoundTripLatency:
                currentActivity = &mActivityRoundTripLatency;
                break;
            case ActivityType::Glitches:
                currentActivity = &mActivityGlitches;
                break;
            case ActivityType::TestDisconnect:
                currentActivity = &mActivityTestDisconnect;
                break;
            case ActivityType::DataPath:
                currentActivity = &mActivityDataPath;
                break;
        }
    }

    // NOTE(review): parameter is named millis but ActivityEcho::setDelayTime
    // takes seconds — confirm which unit the JNI caller passes.
    void setDelayTime(double delayTimeMillis) {
        mActivityEcho.setDelayTime(delayTimeMillis);
    }

    ActivityTestOutput mActivityTestOutput;
    ActivityTestInput mActivityTestInput;
    ActivityTapToTone mActivityTapToTone;
    ActivityRecording mActivityRecording;
    ActivityEcho mActivityEcho;
    ActivityRoundTripLatency mActivityRoundTripLatency;
    ActivityGlitches mActivityGlitches;
    ActivityDataPath mActivityDataPath;
    ActivityTestDisconnect mActivityTestDisconnect;

private:
    // WARNING - must match definitions in TestAudioActivity.java
    enum ActivityType {
        Undefined = -1,
        TestOutput = 0,
        TestInput = 1,
        TapToTone = 2,
        RecordPlay = 3,
        Echo = 4,
        RoundTripLatency = 5,
        Glitches = 6,
        TestDisconnect = 7,
        DataPath = 8,
    };

    ActivityType mActivityType = ActivityType::Undefined;
    ActivityContext *currentActivity = &mActivityTestOutput;
};
#endif //NATIVEOBOE_NATIVEAUDIOCONTEXT_H

View File

@ -0,0 +1,81 @@
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "common/OboeDebug.h"
#include "OboeStreamCallbackProxy.h"
// Linear congruential random number generator, 16-bit output.
static uint32_t s_random16() {
    static uint32_t seed = 1234;
    seed = ((seed * 31421) + 6927) & 0x0FFFF;
    return seed;
}

/**
 * The random number generator is good for burning CPU because the compiler cannot
 * easily optimize away the computation.
 * @param workload number of times to execute the loop
 * @return a white noise value between -1.0 and +1.0
 */
static float s_burnCPU(int32_t workload) {
    uint32_t random = 0;
    for (int32_t i = 0; i < workload; i++) {
        for (int32_t j = 0; j < 10; j++) {
            random = random ^ s_random16();
        }
    }
    // FIX: cast to signed before subtracting. With unsigned arithmetic,
    // random < 32768 wrapped around and produced values near +131070
    // instead of the documented [-1.0, +1.0] range.
    return ((int32_t) random - 32768) * (1.0f / 32768);
}
bool OboeStreamCallbackProxy::mCallbackReturnStop = false;

// Return the time from the given clock in nanoseconds, or the negative
// return code of clock_gettime() on failure.
int64_t OboeStreamCallbackProxy::getNanoseconds(clockid_t clockId) {
    struct timespec time;
    int result = clock_gettime(clockId, &time);
    if (result < 0) {
        return result;
    }
    // FIX: use integer arithmetic. The original (time.tv_sec * 1e9) went
    // through a double, which loses nanosecond precision once the product
    // exceeds 2^53 (~104 days of clock time).
    return (time.tv_sec * (int64_t) 1000000000) + time.tv_nsec;
}
// Forward the audio callback to the wrapped callback, optionally burning
// CPU first, and maintain a smoothed CPU-load estimate.
oboe::DataCallbackResult OboeStreamCallbackProxy::onAudioReady(
        oboe::AudioStream *audioStream,
        void *audioData,
        int numFrames) {
    const int64_t beginNanos = getNanoseconds();
    mCallbackCount++;
    mFramesPerCallback = numFrames;

    if (mCallbackReturnStop) {
        return oboe::DataCallbackResult::Stop;
    }

    // Waste cycles to simulate an artificial processing load.
    s_burnCPU((int32_t)(mWorkload * kWorkloadScaler * numFrames));

    oboe::DataCallbackResult callbackResult = oboe::DataCallbackResult::Stop;
    if (mCallback != nullptr) {
        callbackResult = mCallback->onAudioReady(audioStream, audioData, numFrames);
    }

    // Update CPU load: (time spent) / (real-time budget for numFrames).
    const double elapsedNanos = (double)(getNanoseconds() - beginNanos);
    const double inverseRealTime = audioStream->getSampleRate() / (1.0e9 * numFrames);
    const double currentCpuLoad = elapsedNanos * inverseRealTime; // avoid a divide
    mCpuLoad = (mCpuLoad * 0.95) + (currentCpuLoad * 0.05); // simple low pass filter

    return callbackResult;
}

View File

@ -0,0 +1,88 @@
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_OBOESTREAMCALLBACKPROXY_H
#define NATIVEOBOE_OBOESTREAMCALLBACKPROXY_H
#include <unistd.h>
#include <sys/types.h>
#include "oboe/Oboe.h"
// Wraps another AudioStreamCallback, counting callbacks, measuring CPU
// load, and optionally adding an artificial workload.
class OboeStreamCallbackProxy : public oboe::AudioStreamCallback {
public:
    void setCallback(oboe::AudioStreamCallback *callback) {
        mCallback = callback;
        setCallbackCount(0);
    }

    // When true, the next callback returns Stop.
    // NOTE(review): plain static bool written from another thread — confirm
    // whether this should be atomic.
    static void setCallbackReturnStop(bool b) {
        mCallbackReturnStop = b;
    }

    int64_t getCallbackCount() {
        return mCallbackCount;
    }

    void setCallbackCount(int64_t count) {
        mCallbackCount = count;
    }

    int32_t getFramesPerCallback() {
        return mFramesPerCallback.load();
    }

    /**
     * Called when the stream is ready to process audio.
     */
    oboe::DataCallbackResult onAudioReady(
            oboe::AudioStream *audioStream,
            void *audioData,
            int numFrames) override;

    /**
     * Specify the amount of artificial workload that will waste CPU cycles
     * and increase the CPU load.
     * @param workload typically ranges from 0.0 to 100.0
     */
    void setWorkload(double workload) {
        mWorkload = std::max(0.0, workload);
    }

    double getWorkload() const {
        return mWorkload;
    }

    // Smoothed CPU-load estimate updated by onAudioReady().
    double getCpuLoad() const {
        return mCpuLoad;
    }

    static int64_t getNanoseconds(clockid_t clockId = CLOCK_MONOTONIC);

private:
    // Scales the workload parameter into s_burnCPU loop iterations per frame.
    static constexpr int32_t kWorkloadScaler = 500;
    double mWorkload = 0.0;
    std::atomic<double> mCpuLoad{0};

    oboe::AudioStreamCallback *mCallback = nullptr;
    static bool mCallbackReturnStop;
    int64_t mCallbackCount = 0;
    std::atomic<int32_t> mFramesPerCallback{0};
};
#endif //NATIVEOBOE_OBOESTREAMCALLBACKPROXY_H

View File

@ -0,0 +1,40 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "AudioStreamGateway.h"
#include "oboe/Oboe.h"
#include "common/OboeDebug.h"
#include <sched.h>
#include <cstring>
#include "OboeTesterStreamCallback.h"
// Print if scheduler changes.
// Log the calling thread's scheduling policy (SCHED_FIFO / SCHED_OTHER /
// SCHED_RR) whenever it differs from the last value seen. Compiles to a
// no-op unless OBOE_ENABLE_LOGGING is set at build time.
void OboeTesterStreamCallback::printScheduler() {
#if OBOE_ENABLE_LOGGING
    int scheduler = sched_getscheduler(gettid());
    if (scheduler != mPreviousScheduler) {
        int schedulerType = scheduler & 0xFFFF; // mask off high flags
        LOGD("callback CPU scheduler = 0x%08x = %s",
             scheduler,
             ((schedulerType == SCHED_FIFO) ? "SCHED_FIFO" :
              ((schedulerType == SCHED_OTHER) ? "SCHED_OTHER" :
               ((schedulerType == SCHED_RR) ? "SCHED_RR" : "UNKNOWN")))
        );
        mPreviousScheduler = scheduler;
    }
#endif
}

View File

@ -0,0 +1,41 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_STREAM_CALLBACK_H
#define OBOETESTER_STREAM_CALLBACK_H
#include <unistd.h>
#include <sys/types.h>
#include "flowgraph/FlowGraphNode.h"
#include "oboe/Oboe.h"
// Base callback that can report changes in the audio thread's scheduler.
class OboeTesterStreamCallback : public oboe::AudioStreamCallback {
public:
    virtual ~OboeTesterStreamCallback() = default;

    // Call this before starting.
    // Forgets the last scheduler seen so the next callback logs it again.
    void reset() {
        mPreviousScheduler = -1;
    }

protected:
    void printScheduler();

    int mPreviousScheduler = -1; // -1 means "not yet sampled"
};
#endif //OBOETESTER_STREAM_CALLBACK_H

View File

@ -0,0 +1,33 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "PlayRecordingCallback.h"
/**
* Called when the stream is ready to process audio.
*/
/**
 * Called when the stream is ready to process audio.
 * Fills the buffer from the stored recording; stops the stream when the
 * recording is exhausted or was never provided.
 */
oboe::DataCallbackResult PlayRecordingCallback::onAudioReady(
        oboe::AudioStream *audioStream,
        void *audioData,
        int numFrames) {
    // FIX: guard against playback starting before setRecording() was called;
    // the original dereferenced a null mRecording and crashed.
    if (mRecording == nullptr) {
        return oboe::DataCallbackResult::Stop;
    }
    float *floatData = (float *) audioData;
    // Read stored data into the buffer provided.
    int32_t framesRead = mRecording->read(floatData, numFrames);
    // LOGI("%s() framesRead = %d, numFrames = %d", __func__, framesRead, numFrames);
    return framesRead > 0
           ? oboe::DataCallbackResult::Continue
           : oboe::DataCallbackResult::Stop;
}

View File

@ -0,0 +1,46 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_PLAY_RECORDING_CALLBACK_H
#define NATIVEOBOE_PLAY_RECORDING_CALLBACK_H
#include "oboe/Oboe.h"
#include "MultiChannelRecording.h"
// Plays audio previously captured into a MultiChannelRecording.
class PlayRecordingCallback : public oboe::AudioStreamCallback {
public:
    PlayRecordingCallback() {}
    ~PlayRecordingCallback() = default;

    // Set the data source; must be called before playback starts.
    // The recording is borrowed, not owned.
    void setRecording(MultiChannelRecording *recording) {
        mRecording = recording;
    }

    /**
     * Called when the stream is ready to process audio.
     */
    oboe::DataCallbackResult onAudioReady(
            oboe::AudioStream *audioStream,
            void *audioData,
            int numFrames) override; // FIX: added 'override' to catch signature drift

private:
    MultiChannelRecording *mRecording = nullptr;
};
#endif //NATIVEOBOE_PLAY_RECORDING_CALLBACK_H

View File

@ -0,0 +1,69 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <unistd.h>
#include "common/OboeDebug.h"
#include "oboe/Definitions.h"
#include "SawPingGenerator.h"
using namespace oboe::flowgraph;
// Start silent: no pending trigger and zero level until trigger() is called.
SawPingGenerator::SawPingGenerator()
        : OscillatorBase()
        , mRequestCount(0)
        , mAcknowledgeCount(0)
        , mLevel(0.0f) {
}

SawPingGenerator::~SawPingGenerator() = default;
// Clear any pending trigger along with the base flow-graph state.
void SawPingGenerator::reset() {
    FlowGraphNode::reset();
    // Catch the acknowledge counter up so no stale ping request fires.
    mAcknowledgeCount = mRequestCount.load();
}
// Render a decaying sawtooth ping, or silence once the level has faded out.
int32_t SawPingGenerator::onProcess(int numFrames) {
    const float *frequencies = frequency.getBuffer();
    const float *amplitudes = amplitude.getBuffer();
    float *out = output.getBuffer();

    // A pending trigger restarts the ping at full level.
    if (mRequestCount.load() > mAcknowledgeCount.load()) {
        mPhase = -1.0f;
        mLevel = 1.0;
        mAcknowledgeCount++;
    }

    // Check level to prevent numeric underflow.
    if (mLevel > 0.000001) {
        for (int i = 0; i < numFrames; i++) {
            float sawtooth = incrementPhase(frequencies[i]);
            out[i] = (float) (sawtooth * mLevel * amplitudes[i]);
            mLevel *= 0.999; // exponential decay per sample
        }
    } else {
        for (int i = 0; i < numFrames; i++) {
            out[i] = 0.0f;
        }
    }
    return numFrames;
}
void SawPingGenerator::trigger() {
mRequestCount++;
}

View File

@ -0,0 +1,46 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_SAWPINGGENERATOR_H
#define NATIVEOBOE_SAWPINGGENERATOR_H
#include <atomic>
#include <unistd.h>
#include <sys/types.h>
#include "flowgraph/FlowGraphNode.h"
#include "flowunits/OscillatorBase.h"
/**
 * Oscillator that emits a decaying sawtooth "ping" each time trigger() is called.
 * The trigger request is passed to the audio thread through atomic counters,
 * so trigger() can be called from another thread.
 */
class SawPingGenerator : public OscillatorBase {
public:
SawPingGenerator();
virtual ~SawPingGenerator();
// Render numFrames of the decaying sawtooth, or silence once the ping has faded.
int32_t onProcess(int numFrames) override;
// Request a new ping; safe to call from a non-audio thread.
void trigger();
// Clear any pending trigger along with the base oscillator state.
void reset() override;
private:
std::atomic<int> mRequestCount; // external thread increments this to request a beep
std::atomic<int> mAcknowledgeCount; // audio thread sets this to acknowledge
double mLevel; // current ping amplitude, decays toward zero (see onProcess)
};
#endif //NATIVEOBOE_SAWPINGGENERATOR_H

View File

@ -0,0 +1,203 @@
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_BASE_SINE_ANALYZER_H
#define ANALYZER_BASE_SINE_ANALYZER_H
#include <algorithm>
#include <cctype>
#include <iomanip>
#include <iostream>
#include "InfiniteRecording.h"
#include "LatencyAnalyzer.h"
/**
* Output a steady sine wave and analyze the return signal.
*
* Use a cosine transform to measure the predicted magnitude and relative phase of the
* looped back sine wave. Then generate a predicted signal and compare with the actual signal.
*/
class BaseSineAnalyzer : public LoopbackProcessor {
public:
BaseSineAnalyzer()
: LoopbackProcessor()
, mInfiniteRecording(64 * 1024) {}
// Subclasses may gate the reference tone, e.g. off while idle.
virtual bool isOutputEnabled() { return true; }
// Store the measured magnitude and rescale the glitch tolerance to it.
void setMagnitude(double magnitude) {
mMagnitude = magnitude;
mScaledTolerance = mMagnitude * mTolerance;
}
// Phase of the returned signal relative to the output, in [-PI, +PI].
double getPhaseOffset() {
return mPhaseOffset;
}
// Smoothed magnitude of the looped-back sine component.
double getMagnitude() const {
return mMagnitude;
}
void setInputChannel(int inputChannel) {
mInputChannel = inputChannel;
}
int getInputChannel() const {
return mInputChannel;
}
void setOutputChannel(int outputChannel) {
mOutputChannel = outputChannel;
}
int getOutputChannel() const {
return mOutputChannel;
}
// Amount of white noise mixed into the output tone (0.0 disables it).
void setNoiseAmplitude(double noiseAmplitude) {
mNoiseAmplitude = noiseAmplitude;
}
double getNoiseAmplitude() const {
return mNoiseAmplitude;
}
// Tolerance is a fraction of the magnitude; see setMagnitude().
double getTolerance() {
return mTolerance;
}
void setTolerance(double tolerance) {
mTolerance = tolerance;
}
// advance and wrap phase
void incrementOutputPhase() {
mOutputPhase += mPhaseIncrement;
if (mOutputPhase > M_PI) {
mOutputPhase -= (2.0 * M_PI);
}
}
/**
 * Write one frame of the reference sine wave (plus optional noise) to the
 * output channel; all other channels are zeroed.
 * @param frameData upon return, contains the reference sine wave
 * @param channelCount number of interleaved samples in frameData
 */
result_code processOutputFrame(float *frameData, int channelCount) override {
float output = 0.0f;
// Output sine wave so we can measure it.
if (isOutputEnabled()) {
float sinOut = sinf(mOutputPhase);
incrementOutputPhase();
output = (sinOut * mOutputAmplitude)
+ (mWhiteNoise.nextRandomDouble() * mNoiseAmplitude);
// ALOGD("sin(%f) = %f, %f\n", mOutputPhase, sinOut, mPhaseIncrement);
}
for (int i = 0; i < channelCount; i++) {
frameData[i] = (i == mOutputChannel) ? output : 0.0f;
}
return RESULT_OK;
}
/**
 * Calculate the magnitude of the component of the input signal
 * that matches the analysis frequency.
 * Also calculate the phase that we can use to create a
 * signal that matches that component.
 * The phase will be between -PI and +PI.
 * Returns 0.0 if no frames have been accumulated yet.
 */
double calculateMagnitudePhase(double *phasePtr = nullptr) {
if (mFramesAccumulated == 0) {
return 0.0;
}
double sinMean = mSinAccumulator / mFramesAccumulated;
double cosMean = mCosAccumulator / mFramesAccumulated;
double magnitude = 2.0 * sqrt((sinMean * sinMean) + (cosMean * cosMean));
if (phasePtr != nullptr) {
double phase = M_PI_2 - atan2(sinMean, cosMean);
*phasePtr = phase;
}
return magnitude;
}
// Accumulate one sample into the sine/cosine correlators.
// Returns true when a full period has been accumulated and the
// magnitude/phase estimate (and mPhaseOffset) have been updated.
bool transformSample(float sample, float referencePhase) {
// Track incoming signal and slowly adjust magnitude to account
// for drift in the DRC or AGC.
mSinAccumulator += sample * sinf(referencePhase);
mCosAccumulator += sample * cosf(referencePhase);
mFramesAccumulated++;
// Must be a multiple of the period or the calculation will not be accurate.
if (mFramesAccumulated == mSinePeriod) {
const double coefficient = 0.1;
double magnitude = calculateMagnitudePhase(&mPhaseOffset);
// One pole averaging filter.
setMagnitude((mMagnitude * (1.0 - coefficient)) + (magnitude * coefficient));
return true;
} else {
return false;
}
}
// reset the sine wave detector
virtual void resetAccumulator() {
mFramesAccumulated = 0;
mSinAccumulator = 0.0;
mCosAccumulator = 0.0;
}
void reset() override {
LoopbackProcessor::reset();
resetAccumulator();
}
// Derive the analysis period/increment from the stream's sample rate.
void prepareToTest() override {
LoopbackProcessor::prepareToTest();
mSinePeriod = getSampleRate() / kTargetGlitchFrequency;
mOutputPhase = 0.0f;
mInverseSinePeriod = 1.0 / mSinePeriod;
mPhaseIncrement = 2.0 * M_PI * mInverseSinePeriod;
}
protected:
static constexpr int32_t kTargetGlitchFrequency = 1000;
int32_t mSinePeriod = 1; // this will be set before use
double mInverseSinePeriod = 1.0;
double mPhaseIncrement = 0.0;
double mOutputPhase = 0.0; // phase of the generated tone, wrapped to (-PI, PI]
double mOutputAmplitude = 0.75;
// If this jumps around then we are probably just hearing noise.
double mPhaseOffset = 0.0;
double mMagnitude = 0.0;
int32_t mFramesAccumulated = 0;
double mSinAccumulator = 0.0;
double mCosAccumulator = 0.0;
double mScaledTolerance = 0.0; // mMagnitude * mTolerance, kept in sync by setMagnitude()
InfiniteRecording<float> mInfiniteRecording;
private:
int32_t mInputChannel = 0;
int32_t mOutputChannel = 0;
float mTolerance = 0.10; // scaled from 0.0 to 1.0
float mNoiseAmplitude = 0.00; // Used to experiment with warbling caused by DRC.
PseudoRandom mWhiteNoise;
};
#endif //ANALYZER_BASE_SINE_ANALYZER_H

View File

@ -0,0 +1,94 @@
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_DATA_PATH_ANALYZER_H
#define ANALYZER_DATA_PATH_ANALYZER_H
#include <algorithm>
#include <cctype>
#include <iomanip>
#include <iostream>
#include <math.h>
#include "BaseSineAnalyzer.h"
#include "InfiniteRecording.h"
#include "LatencyAnalyzer.h"
/**
* Output a steady sine wave and analyze the return signal.
*
* Use a cosine transform to measure the predicted magnitude and relative phase of the
* looped back sine wave.
*/
class DataPathAnalyzer : public BaseSineAnalyzer {
public:
    DataPathAnalyzer() : BaseSineAnalyzer() {
        // Add a little bit of noise to reduce blockage by speaker protection and DRC.
        setNoiseAmplitude(0.05);
    }

    /**
     * Accumulate one microphone sample; track the peak magnitude while the
     * measured phase is stable.
     * @param frameData contains microphone data with sine signal feedback
     * @param channelCount
     */
    result_code processInputFrame(const float *frameData, int /* channelCount */) override {
        result_code result = RESULT_OK;
        float sample = frameData[getInputChannel()];
        mInfiniteRecording.write(sample);
        if (transformSample(sample, mOutputPhase)) {
            resetAccumulator();
        }
        // Update MaxMagnitude if we are locked.
        // FIX: use fabs(), not abs() — plain abs() can resolve to the integer
        // overload, truncating a sub-1.0 phase difference to zero and making
        // every frame look "locked".
        double diff = fabs(mPhaseOffset - mPreviousPhaseOffset);
        if (diff < mPhaseTolerance) {
            mMaxMagnitude = std::max(mMagnitude, mMaxMagnitude);
        }
        mPreviousPhaseOffset = mPhaseOffset;
        return result;
    }

    // Report the latest measurements in the LOOPBACK_RESULT_TAG key=value format.
    std::string analyze() override {
        std::stringstream report;
        report << "DataPathAnalyzer ------------------\n";
        report << LOOPBACK_RESULT_TAG "sine.magnitude     = " << std::setw(8)
               << mMagnitude << "\n";
        report << LOOPBACK_RESULT_TAG "frames.accumulated = " << std::setw(8)
               << mFramesAccumulated << "\n";
        report << LOOPBACK_RESULT_TAG "sine.period        = " << std::setw(8)
               << mSinePeriod << "\n";
        return report.str();
    }

    void reset() override {
        BaseSineAnalyzer::reset();
        mPreviousPhaseOffset = 999.0; // Arbitrary high offset to prevent early lock.
        mMaxMagnitude = 0.0;
    }

    // Largest magnitude observed while phase-locked since the last reset().
    double getMaxMagnitude() {
        return mMaxMagnitude;
    }

private:
    double  mPreviousPhaseOffset = 0.0;
    double  mPhaseTolerance = 2 * M_PI / 48; // max phase jitter still considered "locked"
    double  mMaxMagnitude = 0.0;
};
#endif // ANALYZER_DATA_PATH_ANALYZER_H

View File

@ -0,0 +1,363 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_GLITCH_ANALYZER_H
#define ANALYZER_GLITCH_ANALYZER_H
#include <algorithm>
#include <cctype>
#include <iomanip>
#include <iostream>
#include "InfiniteRecording.h"
#include "LatencyAnalyzer.h"
#include "BaseSineAnalyzer.h"
#include "PseudoRandom.h"
/**
* Output a steady sine wave and analyze the return signal.
*
* Use a cosine transform to measure the predicted magnitude and relative phase of the
* looped back sine wave. Then generate a predicted signal and compare with the actual signal.
*/
class GlitchAnalyzer : public BaseSineAnalyzer {
public:
    GlitchAnalyzer() : BaseSineAnalyzer() {}

    int32_t getState() const {
        return mState;
    }

    double getPeakAmplitude() const {
        return mPeakFollower.getLevel();
    }

    int32_t getGlitchCount() const {
        return mGlitchCount;
    }

    // Number of frames spent in the given state since prepareToTest().
    int32_t getStateFrameCount(int state) const {
        return mStateFrameCounters[state];
    }

    /**
     * @return signal-to-noise ratio in dB, or 0.0 when either power is too
     *         small to trust. Sets ERROR_VOLUME_TOO_LOW below MIN_SNR_DB.
     */
    double getSignalToNoiseDB() {
        static const double threshold = 1.0e-14;
        if (mMeanSquareSignal < threshold || mMeanSquareNoise < threshold) {
            return 0.0;
        } else {
            double signalToNoise = mMeanSquareSignal / mMeanSquareNoise; // power ratio
            // FIX: decibels are 10*log10 of a power ratio; the previous
            // natural log overstated the SNR by a factor of ln(10) ~= 2.3.
            double signalToNoiseDB = 10.0 * log10(signalToNoise);
            if (signalToNoiseDB < MIN_SNR_DB) {
                ALOGD("ERROR - signal to noise ratio is too low! < %d dB. Adjust volume.",
                      MIN_SNR_DB);
                setResult(ERROR_VOLUME_TOO_LOW);
            }
            return signalToNoiseDB;
        }
    }

    // Build the final text report and set an error result if the test failed.
    std::string analyze() override {
        std::stringstream report;
        report << "GlitchAnalyzer ------------------\n";
        report << LOOPBACK_RESULT_TAG "peak.amplitude     = " << std::setw(8)
               << getPeakAmplitude() << "\n";
        report << LOOPBACK_RESULT_TAG "sine.magnitude     = " << std::setw(8)
               << mMagnitude << "\n";
        report << LOOPBACK_RESULT_TAG "rms.noise          = " << std::setw(8)
               << mMeanSquareNoise << "\n";
        report << LOOPBACK_RESULT_TAG "signal.to.noise.db = " << std::setw(8)
               << getSignalToNoiseDB() << "\n";
        report << LOOPBACK_RESULT_TAG "frames.accumulated = " << std::setw(8)
               << mFramesAccumulated << "\n";
        report << LOOPBACK_RESULT_TAG "sine.period        = " << std::setw(8)
               << mSinePeriod << "\n";
        report << LOOPBACK_RESULT_TAG "test.state         = " << std::setw(8)
               << mState << "\n";
        report << LOOPBACK_RESULT_TAG "frame.count        = " << std::setw(8)
               << mFrameCounter << "\n";
        // Did we ever get a lock?
        bool gotLock = (mState == STATE_LOCKED) || (mGlitchCount > 0);
        if (!gotLock) {
            report << "ERROR - failed to lock on reference sine tone.\n";
            setResult(ERROR_NO_LOCK);
        } else {
            // Only print if meaningful.
            report << LOOPBACK_RESULT_TAG "glitch.count       = " << std::setw(8)
                   << mGlitchCount << "\n";
            report << LOOPBACK_RESULT_TAG "max.glitch         = " << std::setw(8)
                   << mMaxGlitchDelta << "\n";
            if (mGlitchCount > 0) {
                report << "ERROR - number of glitches > 0\n";
                setResult(ERROR_GLITCHES);
            }
        }
        return report.str();
    }

    void printStatus() override {
        ALOGD("st = %d, #gl = %3d,", mState, mGlitchCount);
    }

    /**
     * Run one microphone sample through the lock/glitch state machine.
     * @param frameData contains microphone data with sine signal feedback
     * @param channelCount
     */
    result_code processInputFrame(const float *frameData, int /* channelCount */) override {
        result_code result = RESULT_OK;

        float sample = frameData[0];
        float peak = mPeakFollower.process(sample);
        mInfiniteRecording.write(sample);

        // Force a periodic glitch to test the detector!
        if (mForceGlitchDuration > 0) {
            if (mForceGlitchCounter == 0) {
                ALOGE("%s: force a glitch!!", __func__);
                mForceGlitchCounter = getSampleRate();
            } else if (mForceGlitchCounter <= mForceGlitchDuration) {
                // Force an abrupt offset.
                sample += (sample > 0.0) ? -0.5f : 0.5f;
            }
            --mForceGlitchCounter;
        }

        mStateFrameCounters[mState]++; // count how many frames we are in each state

        switch (mState) {
            case STATE_IDLE:
                mDownCounter--;
                if (mDownCounter <= 0) {
                    mState = STATE_IMMUNE;
                    mDownCounter = IMMUNE_FRAME_COUNT;
                    mInputPhase = 0.0; // prevent spike at start
                    mOutputPhase = 0.0;
                }
                break;

            case STATE_IMMUNE:
                // Let the hardware settle before listening for the tone.
                mDownCounter--;
                if (mDownCounter <= 0) {
                    mState = STATE_WAITING_FOR_SIGNAL;
                }
                break;

            case STATE_WAITING_FOR_SIGNAL:
                if (peak > mThreshold) {
                    mState = STATE_WAITING_FOR_LOCK;
                    //ALOGD("%5d: switch to STATE_WAITING_FOR_LOCK", mFrameCounter);
                    resetAccumulator();
                }
                break;

            case STATE_WAITING_FOR_LOCK:
                mSinAccumulator += sample * sinf(mInputPhase);
                mCosAccumulator += sample * cosf(mInputPhase);
                mFramesAccumulated++;
                // Must be a multiple of the period or the calculation will not be accurate.
                if (mFramesAccumulated == mSinePeriod * PERIODS_NEEDED_FOR_LOCK) {
                    setMagnitude(calculateMagnitudePhase(&mPhaseOffset));
                    // ALOGD("%s() mag = %f, offset = %f, prev = %f",
                    //       __func__, mMagnitude, mPhaseOffset, mPreviousPhaseOffset);
                    if (mMagnitude > mThreshold) {
                        // FIX: fabs(), not abs() — the integer overload would
                        // truncate any |phase| < 1 radian to zero and lock on
                        // a badly misaligned phase.
                        if (fabs(mPhaseOffset) < kMaxPhaseError) {
                            mState = STATE_LOCKED;
                            // ALOGD("%5d: switch to STATE_LOCKED", mFrameCounter);
                        }
                        // Adjust mInputPhase to match measured phase
                        mInputPhase += mPhaseOffset;
                    }
                    resetAccumulator();
                }
                incrementInputPhase();
                break;

            case STATE_LOCKED: {
                // Predict next sine value
                double predicted = sinf(mInputPhase) * mMagnitude;
                double diff = predicted - sample;
                double absDiff = fabs(diff);
                mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
                if (absDiff > mScaledTolerance) {
                    result = ERROR_GLITCHES;
                    onGlitchStart();
                    // LOGI("diff glitch detected, absDiff = %g", absDiff);
                } else {
                    mSumSquareSignal += predicted * predicted;
                    mSumSquareNoise += diff * diff;
                    // Track incoming signal and slowly adjust magnitude to account
                    // for drift in the DRC or AGC.
                    // Must be a multiple of the period or the calculation will not be accurate.
                    if (transformSample(sample, mInputPhase)) {
                        mMeanSquareNoise = mSumSquareNoise * mInverseSinePeriod;
                        mMeanSquareSignal = mSumSquareSignal * mInverseSinePeriod;
                        resetAccumulator();

                        // FIX: fabs(), not abs(), for the same reason as above.
                        if (fabs(mPhaseOffset) > kMaxPhaseError) {
                            result = ERROR_GLITCHES;
                            onGlitchStart();
                            ALOGD("phase glitch detected, phaseOffset = %g", mPhaseOffset);
                        } else if (mMagnitude < mThreshold) {
                            result = ERROR_GLITCHES;
                            onGlitchStart();
                            ALOGD("magnitude glitch detected, mMagnitude = %g", mMagnitude);
                        }
                    }
                }
                incrementInputPhase();
            } break;

            case STATE_GLITCHING: {
                // Predict next sine value
                mGlitchLength++;
                double predicted = sinf(mInputPhase) * mMagnitude;
                double diff = predicted - sample;
                double absDiff = fabs(diff);
                mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
                if (absDiff < mScaledTolerance) { // close enough?
                    // If we get a full sine period of non-glitch samples in a row then consider the glitch over.
                    // We don't want to just consider a zero crossing the end of a glitch.
                    if (mNonGlitchCount++ > mSinePeriod) {
                        onGlitchEnd();
                    }
                } else {
                    mNonGlitchCount = 0;
                    if (mGlitchLength > (4 * mSinePeriod)) {
                        relock();
                    }
                }
                incrementInputPhase();
            } break;

            case NUM_STATES: // not a real state
                break;
        }

        mFrameCounter++;

        return result;
    }

    // advance and wrap phase
    void incrementInputPhase() {
        mInputPhase += mPhaseIncrement;
        if (mInputPhase > M_PI) {
            mInputPhase -= (2.0 * M_PI);
        }
    }

    bool isOutputEnabled() override { return mState != STATE_IDLE; }

    void onGlitchStart() {
        mGlitchCount++;
        // ALOGD("%5d: STARTED a glitch # %d", mFrameCounter, mGlitchCount);
        mState = STATE_GLITCHING;
        mGlitchLength = 1;
        mNonGlitchCount = 0;
        mLastGlitchPosition = mInfiniteRecording.getTotalWritten();
    }

    void onGlitchEnd() {
        // ALOGD("%5d: ENDED a glitch # %d, length = %d", mFrameCounter, mGlitchCount, mGlitchLength);
        mState = STATE_LOCKED;
        resetAccumulator();
    }

    // reset the sine wave detector
    void resetAccumulator() override {
        BaseSineAnalyzer::resetAccumulator();
        mSumSquareSignal = 0.0;
        mSumSquareNoise = 0.0;
    }

    // Give up on the current lock after a very long glitch and re-acquire.
    void relock() {
        // ALOGD("relock: %d because of a very long %d glitch", mFrameCounter, mGlitchLength);
        mState = STATE_WAITING_FOR_LOCK;
        resetAccumulator();
    }

    void reset() override {
        BaseSineAnalyzer::reset();
        mState = STATE_IDLE;
        mDownCounter = IDLE_FRAME_COUNT;
    }

    void prepareToTest() override {
        BaseSineAnalyzer::prepareToTest();
        mGlitchCount = 0;
        mMaxGlitchDelta = 0.0;
        for (int i = 0; i < NUM_STATES; i++) {
            mStateFrameCounters[i] = 0;
        }
    }

    // Copy the recording surrounding the last detected glitch into buffer.
    int32_t getLastGlitch(float *buffer, int32_t length) {
        return mInfiniteRecording.readFrom(buffer, mLastGlitchPosition - 32, length);
    }

private:
    // These must match the values in GlitchActivity.java
    enum sine_state_t {
        STATE_IDLE,               // beginning
        STATE_IMMUNE,             // ignoring input, waiting fo HW to settle
        STATE_WAITING_FOR_SIGNAL, // looking for a loud signal
        STATE_WAITING_FOR_LOCK,   // trying to lock onto the phase of the sine
        STATE_LOCKED,             // locked on the sine wave, looking for glitches
        STATE_GLITCHING,          // locked on the sine wave but glitching
        NUM_STATES
    };

    enum constants {
        // Arbitrary durations, assuming 48000 Hz
        IDLE_FRAME_COUNT = 48 * 100,
        IMMUNE_FRAME_COUNT = 48 * 100,
        PERIODS_NEEDED_FOR_LOCK = 8,
        MIN_SNR_DB = 65
    };

    static constexpr double kMaxPhaseError = M_PI * 0.05;

    double  mThreshold = 0.005;
    // FIX: zero-initialize so getStateFrameCount() is valid before prepareToTest().
    int32_t mStateFrameCounters[NUM_STATES] = {};
    sine_state_t mState = STATE_IDLE;
    // FIX: initialize so getLastGlitch() before any glitch reads position 0,
    // not an indeterminate value.
    int64_t mLastGlitchPosition = 0;

    double  mInputPhase = 0.0;
    double  mMaxGlitchDelta = 0.0;
    int32_t mGlitchCount = 0;
    int32_t mNonGlitchCount = 0;
    int32_t mGlitchLength = 0;
    int     mDownCounter = IDLE_FRAME_COUNT;
    int32_t mFrameCounter = 0;

    int32_t mForceGlitchDuration = 0; // if > 0 then force a glitch for debugging
    int32_t mForceGlitchCounter = 4 * 48000; // count down and trigger at zero

    // measure background noise continuously as a deviation from the expected signal
    double  mSumSquareSignal = 0.0;
    double  mSumSquareNoise = 0.0;
    double  mMeanSquareSignal = 0.0;
    double  mMeanSquareNoise = 0.0;

    PeakDetector mPeakFollower;
};
#endif //ANALYZER_GLITCH_ANALYZER_H

View File

@ -0,0 +1,67 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_INFINITE_RECORDING_H
#define OBOETESTER_INFINITE_RECORDING_H
#include <memory>
#include <unistd.h>
/**
* Record forever. Keep last data.
*/
template <typename T>
class InfiniteRecording {
public:
    // Ring buffer that records forever, retaining only the newest maxSamples
    // samples plus a running count of everything ever written.
    InfiniteRecording(size_t maxSamples)
            : mMaxSamples(maxSamples) {
        mData = std::make_unique<T[]>(mMaxSamples);
    }

    /**
     * Copy up to count samples, starting at absolute sample index position,
     * into buffer. Returns how many samples were copied.
     * NOTE(review): positions older than (totalWritten - mMaxSamples) have been
     * overwritten and will yield newer data — callers should stay inside the
     * retained window.
     */
    int32_t readFrom(T *buffer, size_t position, size_t count) {
        const size_t totalWritten = mWritten.load();
        if (position > totalWritten) {
            position = totalWritten; // clamp to what actually exists
        }
        size_t toCopy = std::min(count, mMaxSamples);
        toCopy = std::min(toCopy, totalWritten - position);
        if (toCopy == 0) {
            return 0;
        }
        // The ring may wrap, so the copy takes at most two pieces.
        const size_t ringIndex = position % mMaxSamples;
        const size_t firstPiece = std::min(toCopy, mMaxSamples - ringIndex); // up to end of ring
        std::copy(&mData[ringIndex], &mData[ringIndex + firstPiece], buffer);
        if (firstPiece < toCopy) {
            // Wrap around and copy the remainder from the start of the ring.
            std::copy(&mData[0], &mData[toCopy - firstPiece], &buffer[firstPiece]);
        }
        return toCopy;
    }

    // Append one sample, overwriting the oldest once the ring is full.
    void write(T sample) {
        const size_t ringIndex = mWritten.load() % mMaxSamples;
        mData[ringIndex] = sample;
        mWritten++;
    }

    // Total samples written since construction (monotonic).
    int64_t getTotalWritten() {
        return mWritten.load();
    }

private:
    std::unique_ptr<T[]> mData;
    std::atomic<size_t>  mWritten{0};
    const size_t         mMaxSamples;
};
#endif //OBOETESTER_INFINITE_RECORDING_H

View File

@ -0,0 +1,622 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Tools for measuring latency and for detecting glitches.
* These classes are pure math and can be used with any audio system.
*/
#ifndef ANALYZER_LATENCY_ANALYZER_H
#define ANALYZER_LATENCY_ANALYZER_H
#include <algorithm>
#include <assert.h>
#include <cctype>
#include <iomanip>
#include <iostream>
#include <math.h>
#include <memory>
#include <sstream>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <vector>
#include "PeakDetector.h"
#include "PseudoRandom.h"
#include "RandomPulseGenerator.h"
// This is used when the code is in Oboe.
#ifndef ALOGD
#define ALOGD LOGD
#define ALOGE LOGE
#define ALOGW LOGW
#endif
#define LOOPBACK_RESULT_TAG "RESULT: "
static constexpr int32_t kDefaultSampleRate = 48000;
static constexpr int32_t kMillisPerSecond = 1000; // by definition
static constexpr int32_t kMaxLatencyMillis = 1000; // arbitrary and generous
static constexpr double kMinimumConfidence = 0.2;
// Result of a loopback latency measurement.
struct LatencyReport {
    int32_t latencyInFrames = 0; // FIX: was initialized with the double literal 0.0
    double confidence = 0.0;     // correlation peak; higher means a more trustworthy result

    void reset() {
        latencyInFrames = 0;
        confidence = 0.0;
    }
};
// Calculate a normalized cross correlation.
// Calculate a normalized cross correlation of two windows:
// 2*sum(a[i]*b[i]) / sum(a[i]^2 + b[i]^2), which is 1.0 for identical signals.
static double calculateNormalizedCorrelation(const float *a,
                                             const float *b,
                                             int windowSize) {
    double sumProducts = 0.0;
    double sumSquares = 0.0;
    for (int i = 0; i < windowSize; i++) {
        const float s1 = a[i];
        const float s2 = b[i];
        sumProducts += s1 * s2;
        sumSquares += (s1 * s1) + (s2 * s2);
    }
    // Near-zero energy means silence; define the correlation as zero.
    if (sumSquares < 1.0e-9) {
        return 0.0;
    }
    return 2.0 * sumProducts / sumSquares;
}
/**
 * @return RMS of the first numSamples values, or 0.0 for an empty buffer.
 *         FIX: the original divided by zero for numSamples == 0, yielding NaN.
 */
static double calculateRootMeanSquare(float *data, int32_t numSamples) {
    if (numSamples <= 0) {
        return 0.0;
    }
    double sum = 0.0;
    for (int32_t i = 0; i < numSamples; i++) {
        float sample = data[i];
        sum += sample * sample;
    }
    return sqrt(sum / numSamples);
}
/**
* Monophonic recording with processing.
*/
/**
 * Monophonic recording with processing.
 */
class AudioRecording
{
public:
    // Reserve room for up to maxFrames mono samples.
    void allocate(int maxFrames) {
        mData = std::make_unique<float[]>(maxFrames);
        mMaxFrames = maxFrames;
    }

    // Write SHORT data from the first channel, converted to float in [-1, 1).
    int32_t write(const int16_t *inputData, int32_t inputChannelCount, int32_t numFrames) {
        // Never write past the end of the buffer.
        const int32_t framesToWrite = std::min(numFrames, mMaxFrames - mFrameCounter);
        for (int i = 0; i < framesToWrite; i++) {
            mData[mFrameCounter++] = inputData[i * inputChannelCount] * (1.0f / 32768);
        }
        return framesToWrite;
    }

    // Write FLOAT data from the first channel.
    int32_t write(const float *inputData, int32_t inputChannelCount, int32_t numFrames) {
        // Never write past the end of the buffer.
        const int32_t framesToWrite = std::min(numFrames, mMaxFrames - mFrameCounter);
        for (int i = 0; i < framesToWrite; i++) {
            mData[mFrameCounter++] = inputData[i * inputChannelCount];
        }
        return framesToWrite;
    }

    // Write a single FLOAT sample; returns the number of samples stored (0 or 1).
    int32_t write(float sample) {
        if (mFrameCounter < mMaxFrames) {
            mData[mFrameCounter++] = sample;
            return 1;
        }
        return 0;
    }

    void clear() {
        mFrameCounter = 0;
    }

    int32_t size() const {
        return mFrameCounter;
    }

    bool isFull() const {
        return mFrameCounter >= mMaxFrames;
    }

    float *getData() const {
        return mData.get();
    }

    void setSampleRate(int32_t sampleRate) {
        mSampleRate = sampleRate;
    }

    int32_t getSampleRate() const {
        return mSampleRate;
    }

    /**
     * Square the samples so they are all positive and so the peaks are emphasized.
     */
    void square() {
        float *samples = mData.get();
        for (int i = 0; i < mFrameCounter; i++) {
            samples[i] *= samples[i];
        }
    }

    /**
     * Amplify a signal so that the peak matches the specified target.
     *
     * @param target final max value
     * @return gain applied to signal
     */
    float normalize(float target) {
        float peak = 1.0e-9f; // floor avoids dividing by zero on silence
        for (int i = 0; i < mFrameCounter; i++) {
            peak = std::max(peak, std::abs(mData[i]));
        }
        const float gain = target / peak;
        for (int i = 0; i < mFrameCounter; i++) {
            mData[i] *= gain;
        }
        return gain;
    }

private:
    std::unique_ptr<float[]> mData;
    int32_t mFrameCounter = 0;
    int32_t mMaxFrames = 0;
    int32_t mSampleRate = kDefaultSampleRate; // common default
};
/**
 * Cross-correlate the known pulse against the recording and report the
 * correlation peak as the latency.
 * @return 0 on success, -1 if the recording is too short, -2 if no peak found
 */
static int measureLatencyFromPulse(AudioRecording &recorded,
                                   AudioRecording &pulse,
                                   LatencyReport *report) {
    report->latencyInFrames = 0;
    report->confidence = 0.0;

    int numCorrelations = recorded.size() - pulse.size();
    if (numCorrelations < 10) {
        ALOGE("%s() recording too small = %d frames\n", __func__, recorded.size());
        return -1;
    }
    std::unique_ptr<float[]> correlations = std::make_unique<float[]>(numCorrelations);

    // Correlate pulse against the recorded data.
    for (int i = 0; i < numCorrelations; i++) {
        float correlation = (float) calculateNormalizedCorrelation(&recorded.getData()[i],
                                                                   &pulse.getData()[0],
                                                                   pulse.size());
        correlations[i] = correlation;
    }

    // Find highest peak in correlation array.
    float peakCorrelation = 0.0;
    int peakIndex = -1;
    for (int i = 0; i < numCorrelations; i++) {
        // FIX: use fabsf(), not abs() — the integer overload would truncate
        // every correlation in (-1, 1) to zero and hide the peak entirely.
        float value = fabsf(correlations[i]);
        if (value > peakCorrelation) {
            peakCorrelation = value;
            peakIndex = i;
        }
    }
    if (peakIndex < 0) {
        ALOGE("%s() no signal for correlation\n", __func__);
        return -2;
    }
#if 0
    // Dump correlation data for charting.
    else {
        const int margin = 50;
        int startIndex = std::max(0, peakIndex - margin);
        int endIndex = std::min(numCorrelations - 1, peakIndex + margin);
        for (int index = startIndex; index < endIndex; index++) {
            ALOGD("Correlation, %d, %f", index, correlations[index]);
        }
    }
#endif

    report->latencyInFrames = peakIndex;
    report->confidence = peakCorrelation;
    return 0;
}
// ====================================================================================
// Base class for loopback analyzers: feeds interleaved audio one frame at a
// time to per-frame hooks supplied by subclasses.
class LoopbackProcessor {
public:
    virtual ~LoopbackProcessor() = default;

    // Result codes shared by all loopback analyzers.
    enum result_code {
        RESULT_OK = 0,
        ERROR_NOISY = -99,
        ERROR_VOLUME_TOO_LOW,
        ERROR_VOLUME_TOO_HIGH,
        ERROR_CONFIDENCE,
        ERROR_INVALID_STATE,
        ERROR_GLITCHES,
        ERROR_NO_LOCK
    };

    // Hook called once before a measurement run begins.
    virtual void prepareToTest() {
        reset();
    }

    // Return to the initial state; also counts how many times it was called.
    virtual void reset() {
        mResult = 0;
        mResetCount++;
    }

    virtual result_code processInputFrame(const float *frameData, int channelCount) = 0;
    virtual result_code processOutputFrame(float *frameData, int channelCount) = 0;

    // Run both buffers through the per-frame hooks, one frame at a time,
    // interleaving input/output while both have frames, then draining the rest.
    void process(const float *inputData, int inputChannelCount, int numInputFrames,
                 float *outputData, int outputChannelCount, int numOutputFrames) {
        int remainingInput = numInputFrames;
        int remainingOutput = numOutputFrames;
        // While both sides have data, alternate one input frame and one output frame.
        while (remainingInput > 0 && remainingOutput > 0) {
            processInputFrame(inputData, inputChannelCount);
            inputData += inputChannelCount;
            remainingInput--;
            processOutputFrame(outputData, outputChannelCount);
            outputData += outputChannelCount;
            remainingOutput--;
        }
        // Drain whichever side has frames left over.
        while (remainingInput-- > 0) {
            processInputFrame(inputData, inputChannelCount);
            inputData += inputChannelCount;
        }
        while (remainingOutput-- > 0) {
            processOutputFrame(outputData, outputChannelCount);
            outputData += outputChannelCount;
        }
    }

    virtual std::string analyze() = 0;

    virtual void printStatus() {};

    int32_t getResult() {
        return mResult;
    }

    void setResult(int32_t result) {
        mResult = result;
    }

    virtual bool isDone() {
        return false;
    }

    virtual int save(const char *fileName) {
        (void) fileName;
        return -1;
    }

    virtual int load(const char *fileName) {
        (void) fileName;
        return -1;
    }

    virtual void setSampleRate(int32_t sampleRate) {
        mSampleRate = sampleRate;
    }

    int32_t getSampleRate() const {
        return mSampleRate;
    }

    int32_t getResetCount() const {
        return mResetCount;
    }

    /** Called when not enough input frames could be read after synchronization.
    */
    virtual void onInsufficientRead() {
        reset();
    }

protected:
    int32_t mResetCount = 0;

private:
    int32_t mSampleRate = kDefaultSampleRate;
    int32_t mResult = 0;
};
// Abstract interface for analyzers that measure round-trip latency.
class LatencyAnalyzer : public LoopbackProcessor {
public:
LatencyAnalyzer() : LoopbackProcessor() {}
virtual ~LatencyAnalyzer() = default;
// Progress indicator; implementations report frames recorded so far.
virtual int32_t getProgress() const = 0;
// Implementation-defined state code (see PulseLatencyAnalyzer states).
virtual int getState() = 0;
// @return latency in frames
virtual int32_t getMeasuredLatency() = 0;
// @return correlation confidence of the measurement; higher is more reliable
virtual double getMeasuredConfidence() = 0;
// @return RMS of the background noise measured before the pulse was sent
virtual double getBackgroundRMS() = 0;
// @return RMS of the detected pulse in the recording
virtual double getSignalRMS() = 0;
};
// ====================================================================================
/**
* Measure latency given a loopback stream data.
* Use an encoded bit train as the sound source because it
* has an unambiguous correlation value.
* Uses a state machine to cycle through various stages.
*
*/
class PulseLatencyAnalyzer : public LatencyAnalyzer {
public:
    PulseLatencyAnalyzer() : LatencyAnalyzer() {
        // Worst-case delay we are prepared to measure, in frames.
        int32_t maxLatencyFrames = getSampleRate() * kMaxLatencyMillis / kMillisPerSecond;
        // Number of encoded bits that fit in the pulse duration.
        int32_t numPulseBits = getSampleRate() * kPulseLengthMillis
                / (kFramesPerEncodedBit * kMillisPerSecond);
        int32_t pulseLength = numPulseBits * kFramesPerEncodedBit;
        // Record enough for the whole pulse plus the worst-case latency.
        mFramesToRecord = pulseLength + maxLatencyFrames;
        mAudioRecording.allocate(mFramesToRecord);
        mAudioRecording.setSampleRate(getSampleRate());
        generateRandomPulse(pulseLength);
    }
    // Fill mPulse with a random Manchester-encoded bit train, chosen because
    // it has an unambiguous correlation peak.
    void generateRandomPulse(int32_t pulseLength) {
        mPulse.allocate(pulseLength);
        RandomPulseGenerator pulser(kFramesPerEncodedBit);
        for (int i = 0; i < pulseLength; i++) {
            mPulse.write(pulser.nextFloat());
        }
    }
    int getState() override {
        return mState;
    }
    // Keep the recording's sample rate in sync with the processor's.
    void setSampleRate(int32_t sampleRate) override {
        LoopbackProcessor::setSampleRate(sampleRate);
        mAudioRecording.setSampleRate(sampleRate);
    }
    // Restart the measurement from the background-noise phase.
    void reset() override {
        LoopbackProcessor::reset();
        mState = STATE_MEASURE_BACKGROUND;
        mDownCounter = (int32_t) (getSampleRate() * kBackgroundMeasurementLengthSeconds);
        mLoopCounter = 0;
        mPulseCursor = 0;
        mBackgroundSumSquare = 0.0f;
        mBackgroundSumCount = 0;
        mBackgroundRMS = 0.0f;
        mSignalRMS = 0.0f;
        mAudioRecording.clear();
        mLatencyReport.reset();
    }
    bool hasEnoughData() {
        return mAudioRecording.isFull();
    }
    bool isDone() override {
        return mState == STATE_DONE;
    }
    // Progress is how many frames have been captured so far.
    int32_t getProgress() const override {
        return mAudioRecording.size();
    }
    // Correlate the recording against the pulse and build a text report.
    // Also sets the result code and moves the state machine to STATE_DONE.
    std::string analyze() override {
        std::stringstream report;
        report << "PulseLatencyAnalyzer ---------------\n";
        report << LOOPBACK_RESULT_TAG "test.state = "
                << std::setw(8) << mState << "\n";
        report << LOOPBACK_RESULT_TAG "test.state.name = "
                << convertStateToText(mState) << "\n";
        report << LOOPBACK_RESULT_TAG "background.rms = "
                << std::setw(8) << mBackgroundRMS << "\n";
        int32_t newResult = RESULT_OK;
        if (mState != STATE_GOT_DATA) {
            report << "WARNING - Bad state. Check volume on device.\n";
            // setResult(ERROR_INVALID_STATE);
        } else {
            // Normalize so correlation is independent of the recording level;
            // keep the gain to undo it when computing the signal RMS below.
            float gain = mAudioRecording.normalize(1.0f);
            measureLatencyFromPulse(mAudioRecording,
                                    mPulse,
                                    &mLatencyReport);
            if (mLatencyReport.confidence < kMinimumConfidence) {
                report << " ERROR - confidence too low!";
                newResult = ERROR_CONFIDENCE;
            } else {
                // RMS of the recorded pulse, starting at the detected latency.
                mSignalRMS = calculateRootMeanSquare(
                        &mAudioRecording.getData()[mLatencyReport.latencyInFrames], mPulse.size())
                        / gain;
            }
            double latencyMillis = kMillisPerSecond * (double) mLatencyReport.latencyInFrames
                    / getSampleRate();
            report << LOOPBACK_RESULT_TAG "latency.frames = " << std::setw(8)
                    << mLatencyReport.latencyInFrames << "\n";
            report << LOOPBACK_RESULT_TAG "latency.msec = " << std::setw(8)
                    << latencyMillis << "\n";
            report << LOOPBACK_RESULT_TAG "latency.confidence = " << std::setw(8)
                    << mLatencyReport.confidence << "\n";
        }
        mState = STATE_DONE;
        // Do not overwrite an earlier error code.
        if (getResult() == RESULT_OK) {
            setResult(newResult);
        }
        return report.str();
    }
    int32_t getMeasuredLatency() override {
        return mLatencyReport.latencyInFrames;
    }
    double getMeasuredConfidence() override {
        return mLatencyReport.confidence;
    }
    double getBackgroundRMS() override {
        return mBackgroundRMS;
    }
    double getSignalRMS() override {
        return mSignalRMS;
    }
    bool isRecordingComplete() {
        return mState == STATE_GOT_DATA;
    }
    void printStatus() override {
        ALOGD("latency: st = %d = %s", mState, convertStateToText(mState));
    }
    // Consume one input frame: first measure background noise for a fixed
    // number of frames, then record input until the capture buffer is full.
    result_code processInputFrame(const float *frameData, int channelCount) override {
        echo_state nextState = mState;
        mLoopCounter++;
        switch (mState) {
            case STATE_MEASURE_BACKGROUND:
                // Measure background RMS on channel 0
                mBackgroundSumSquare += frameData[0] * frameData[0];
                mBackgroundSumCount++;
                mDownCounter--;
                if (mDownCounter <= 0) {
                    mBackgroundRMS = sqrtf(mBackgroundSumSquare / mBackgroundSumCount);
                    nextState = STATE_IN_PULSE;
                    mPulseCursor = 0;
                }
                break;
            case STATE_IN_PULSE:
                // Record input until the mAudioRecording is full.
                mAudioRecording.write(frameData, channelCount, 1);
                if (hasEnoughData()) {
                    nextState = STATE_GOT_DATA;
                }
                break;
            case STATE_GOT_DATA:
            case STATE_DONE:
            default:
                break;
        }
        mState = nextState;
        return RESULT_OK;
    }
    // Produce one output frame: play the pulse while in STATE_IN_PULSE,
    // otherwise emit silence on every channel.
    result_code processOutputFrame(float *frameData, int channelCount) override {
        switch (mState) {
            case STATE_IN_PULSE:
                if (mPulseCursor < mPulse.size()) {
                    float pulseSample = mPulse.getData()[mPulseCursor++];
                    for (int i = 0; i < channelCount; i++) {
                        frameData[i] = pulseSample;
                    }
                } else {
                    // Pulse finished; keep outputting silence while recording.
                    for (int i = 0; i < channelCount; i++) {
                        frameData[i] = 0;
                    }
                }
                break;
            case STATE_MEASURE_BACKGROUND:
            case STATE_GOT_DATA:
            case STATE_DONE:
            default:
                for (int i = 0; i < channelCount; i++) {
                    frameData[i] = 0.0f; // silence
                }
                break;
        }
        return RESULT_OK;
    }
private:
    enum echo_state {
        STATE_MEASURE_BACKGROUND,
        STATE_IN_PULSE,
        STATE_GOT_DATA, // must match RoundTripLatencyActivity.java
        STATE_DONE,
    };
    const char *convertStateToText(echo_state state) {
        switch (state) {
            case STATE_MEASURE_BACKGROUND:
                return "INIT";
            case STATE_IN_PULSE:
                return "PULSE";
            case STATE_GOT_DATA:
                return "GOT_DATA";
            case STATE_DONE:
                return "DONE";
        }
        return "UNKNOWN";
    }
    int32_t mDownCounter = 500; // frames remaining in the background phase; set in reset()
    int32_t mLoopCounter = 0; // total input frames processed
    echo_state mState = STATE_MEASURE_BACKGROUND;
    static constexpr int32_t kFramesPerEncodedBit = 8; // multiple of 2
    static constexpr int32_t kPulseLengthMillis = 500;
    static constexpr double kBackgroundMeasurementLengthSeconds = 0.5;
    AudioRecording mPulse; // the encoded pulse waveform that is played
    int32_t mPulseCursor = 0; // next pulse sample to output
    double mBackgroundSumSquare = 0.0;
    int32_t mBackgroundSumCount = 0;
    double mBackgroundRMS = 0.0;
    double mSignalRMS = 0.0;
    int32_t mFramesToRecord = 0;
    AudioRecording mAudioRecording; // contains only the input after starting the pulse
    LatencyReport mLatencyReport;
};
#endif // ANALYZER_LATENCY_ANALYZER_H

View File

@ -0,0 +1,98 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_MANCHESTER_ENCODER_H
#define ANALYZER_MANCHESTER_ENCODER_H
#include <cstdint>
/**
* Encode bytes using Manchester Coding scheme.
*
* Manchester Code is self clocking.
* There is a transition in the middle of every bit.
* Zero is high then low.
* One is low then high.
*
* This avoids having long DC sections that would droop when
* passed though analog circuits with AC coupling.
*
* IEEE 802.3 compatible.
*/
class ManchesterEncoder {
public:
    ManchesterEncoder(int samplesPerPulse)
            : mSamplesPerPulse(samplesPerPulse)
            , mSamplesPerPulseHalf(samplesPerPulse / 2)
            , mCursor(samplesPerPulse) {
    }

    virtual ~ManchesterEncoder() = default;

    /**
     * Supply the next byte to encode; bits are emitted MSB first.
     * @return byte to encode
     */
    virtual uint8_t onNextByte() = 0;

    /**
     * Produce one sample of the encoded waveform.
     * A "one" is low then high; a "zero" is high then low.
     * @return sample value, -1.0 or +1.0
     */
    virtual float nextFloat() {
        advanceSample();
        const bool inFirstHalf = mCursor < mSamplesPerPulseHalf;
        // bit==1 & first half -> low; bit==0 & first half -> high
        return (mCurrentBit == inFirstHalf) ? -1.0f : 1.0f;
    }

protected:
    /**
     * Hook called at the start of every encoded bit, before samples
     * for that bit are produced. Useful for preparing waveform tables.
     * @param current
     */
    virtual void onNextBit(bool /* current */) {};

    // Move to the next sample, fetching a new bit (and byte, when
    // the current one is exhausted) at each pulse boundary.
    void advanceSample() {
        ++mCursor;
        if (mCursor < mSamplesPerPulse) {
            return; // still inside the current bit
        }
        mCursor = 0;
        if (mBitsLeft == 0) {
            mCurrentByte = onNextByte();
            mBitsLeft = 8;
        }
        --mBitsLeft;
        mCurrentBit = ((mCurrentByte >> mBitsLeft) & 1) != 0;
        onNextBit(mCurrentBit);
    }

    bool getCurrentBit() {
        return mCurrentBit;
    }

    const int mSamplesPerPulse;
    const int mSamplesPerPulseHalf;
    int mCursor;               // sample position within the current bit
    int mBitsLeft = 0;         // unread bits remaining in mCurrentByte
    uint8_t mCurrentByte = 0;
    bool mCurrentBit = false;
};
#endif //ANALYZER_MANCHESTER_ENCODER_H

View File

@ -0,0 +1,68 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_PEAK_DETECTOR_H
#define ANALYZER_PEAK_DETECTOR_H
#include <math.h>
/**
* Measure a peak envelope by rising with the peaks,
* and decaying exponentially after each peak.
* The absolute value of the input signal is used.
*/
/**
 * Track a peak envelope: the level jumps up to match each peak of |input|
 * and decays exponentially (multiplied by the decay factor per sample)
 * between peaks.
 */
class PeakDetector {
public:
    /** Reset the envelope level to zero. The decay factor is preserved. */
    void reset() {
        mLevel = 0.0;
    }

    /**
     * Process one sample and return the updated envelope.
     * @param input signal sample; its absolute value is used
     * @return current envelope level, always >= |input|
     */
    double process(double input) {
        mLevel *= mDecay; // exponential decay
        input = fabs(input);
        // never fall below the input signal
        if (input > mLevel) {
            mLevel = input;
        }
        return mLevel;
    }

    /** @return current envelope level */
    double getLevel() const {
        return mLevel;
    }

    /** @return current per-sample decay factor */
    double getDecay() const {
        return mDecay;
    }

    /**
     * Multiply the level by this amount on every iteration.
     * This provides an exponential decay curve.
     * A value just under 1.0 is best, for example, 0.99;
     * @param decay scale level for each input
     */
    void setDecay(double decay) {
        mDecay = decay;
    }

private:
    // Fix: was "0.99f" — a float literal silently truncated this
    // double constant to the nearest float (0.9900000095...).
    static constexpr double kDefaultDecay = 0.99;

    double mLevel = 0.0;
    double mDecay = kDefaultDecay;
};
#endif //ANALYZER_PEAK_DETECTOR_H

View File

@ -0,0 +1,57 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_PSEUDORANDOM_H
#define ANALYZER_PSEUDORANDOM_H
#include <cctype>
/**
 * Deterministic pseudo-random generator with predictable real-time cost,
 * based on a 64-bit linear-congruential sequence.
 */
class PseudoRandom {
public:
    PseudoRandom(int64_t seed = 99887766)
            : mSeed(seed)
    {}

    /**
     * @return next pseudo-random double in the range [-1.0, 1.0)
     */
    double nextRandomDouble() {
        return nextRandomInteger() * (0.5 / (((int32_t)1) << 30));
    }

    /**
     * Advance the sequence and return 32 pseudo-random bits.
     * Uses the linear-congruential method so the cost per call is constant.
     */
    int32_t nextRandomInteger() {
        // 64-bit sequence constants from MMIX by Donald Knuth.
        const int64_t kMultiplier = (int64_t)6364136223846793005;
        const int64_t kIncrement = (int64_t)1442695040888963407;
#if __has_builtin(__builtin_mul_overflow) && __has_builtin(__builtin_add_overflow)
        // Overflow intrinsics give well-defined wraparound for signed math.
        int64_t product;
        __builtin_mul_overflow(mSeed, kMultiplier, &product);
        __builtin_add_overflow(product, kIncrement, &mSeed);
#else
        mSeed = (mSeed * kMultiplier) + kIncrement;
#endif
        // The high bits of an LCG have the longest period.
        return (int32_t) (mSeed >> 32);
    }

private:
    int64_t mSeed;
};
#endif //ANALYZER_PSEUDORANDOM_H

View File

@ -0,0 +1,43 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_RANDOM_PULSE_GENERATOR_H
#define ANALYZER_RANDOM_PULSE_GENERATOR_H
#include <stdlib.h>
#include "RoundedManchesterEncoder.h"
/**
* Encode random ones and zeros using Manchester Code per IEEE 802.3.
*/
/**
 * Stream of random bytes encoded as rounded Manchester pulses.
 */
class RandomPulseGenerator : public RoundedManchesterEncoder {
public:
    RandomPulseGenerator(int samplesPerPulse)
            : RoundedManchesterEncoder(samplesPerPulse) {
    }

    virtual ~RandomPulseGenerator() = default;

    /**
     * Supply the next byte to encode.
     * @return random byte
     */
    uint8_t onNextByte() override {
        return (uint8_t) rand();
    }
};
#endif //ANALYZER_RANDOM_PULSE_GENERATOR_H

View File

@ -0,0 +1,88 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
#define ANALYZER_ROUNDED_MANCHESTER_ENCODER_H
#include <math.h>
#include <memory.h>
#include <stdlib.h>

#include <memory>  // std::unique_ptr / std::make_unique (was relying on transitive include)
#include "ManchesterEncoder.h"
/**
* Encode bytes using Manchester Code.
* Round the edges using a half cosine to reduce ringing caused by a hard edge.
*/
class RoundedManchesterEncoder : public ManchesterEncoder {
public:
    RoundedManchesterEncoder(int samplesPerPulse)
            : ManchesterEncoder(samplesPerPulse) {
        // Precompute two one-bit waveform tables for a "zero" bit, in four
        // quarter-pulse segments. A "one" is produced by negating the table
        // in nextFloat(). (Trailing samples beyond 4 * rampSize, if any,
        // stay at the zero value from make_unique's value-initialization.)
        const int rampSize = samplesPerPulse / 4;
        mZeroAfterZero = std::make_unique<float[]>(samplesPerPulse);
        mZeroAfterOne = std::make_unique<float[]>(samplesPerPulse);

        int i = 0;
        // Quarter 1: half-cosine rise for the zero-after-zero table;
        // the zero-after-one table starts already high, so it stays flat.
        for (int k = 0; k < rampSize; k++) {
            float phase = (k + 1) * M_PI / rampSize;
            mZeroAfterZero[i] = -cosf(phase);
            mZeroAfterOne[i] = 1.0f;
            i++;
        }
        // Quarter 2: both tables hold the high level.
        for (int k = 0; k < rampSize; k++) {
            mZeroAfterZero[i] = 1.0f;
            mZeroAfterOne[i] = 1.0f;
            i++;
        }
        // Quarter 3: half-cosine fall in the middle of the bit (both tables).
        for (int k = 0; k < rampSize; k++) {
            float phase = (k + 1) * M_PI / rampSize;
            float sample = cosf(phase);
            mZeroAfterZero[i] = sample;
            mZeroAfterOne[i] = sample;
            i++;
        }
        // Quarter 4: both tables hold the low level.
        for (int k = 0; k < rampSize; k++) {
            mZeroAfterZero[i] = -1.0f;
            mZeroAfterOne[i] = -1.0f;
            i++;
        }
    }

    // Pick the table whose leading edge matches the bit transition.
    void onNextBit(bool current) override {
        mCurrentSamples = (current ^ mPreviousBit)
                ? mZeroAfterOne.get()
                : mZeroAfterZero.get();
        mPreviousBit = current;
    }

    float nextFloat() override {
        advanceSample();
        const float sample = mCurrentSamples[mCursor];
        // The tables describe a "zero"; a "one" is the same shape inverted.
        return getCurrentBit() ? -sample : sample;
    }

private:
    bool mPreviousBit = false;
    float *mCurrentSamples = nullptr;      // points into one of the tables below
    std::unique_ptr<float[]> mZeroAfterZero;
    std::unique_ptr<float[]> mZeroAfterOne;
};
#endif //ANALYZER_ROUNDED_MANCHESTER_ENCODER_H

View File

@ -0,0 +1,41 @@
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#ifndef NATIVE_AUDIO_ANDROID_DEBUG_H_H
#define NATIVE_AUDIO_ANDROID_DEBUG_H_H
#include <android/log.h>
/* Change the 1 below to 0 to compile out all logging macros. */
#if 1
/* Log tag used for every message from this module. */
#define MODULE_NAME "OboeAudio"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, MODULE_NAME, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, MODULE_NAME, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, MODULE_NAME, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, MODULE_NAME, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, MODULE_NAME, __VA_ARGS__)
#define LOGF(...) __android_log_print(ANDROID_LOG_FATAL, MODULE_NAME, __VA_ARGS__)
#else
/* Logging disabled: the macros expand to nothing. */
#define LOGV(...)
#define LOGD(...)
#define LOGI(...)
#define LOGW(...)
#define LOGE(...)
#define LOGF(...)
#endif
#endif //NATIVE_AUDIO_ANDROID_DEBUG_H_H

View File

@ -0,0 +1,35 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <math.h>
#include "ExponentialShape.h"
// Construct the filter; the argument to FlowGraphFilter is presumably the
// number of channels (1) — confirm against FlowGraphNode.h.
ExponentialShape::ExponentialShape()
        : FlowGraphFilter(1) {
}
// Map each bipolar input sample in [-1, +1] onto an exponential sweep
// from mMinimum to mMaximum: output = minimum * ratio^t, t in [0, 1].
// Fix: the 0.5 literals were double, forcing a float->double->float round
// trip per sample; use 0.5f for consistency with LinearShape::onProcess.
int32_t ExponentialShape::onProcess(int32_t numFrames) {
    float *inputs = input.getBuffer();
    float *outputs = output.getBuffer();
    for (int i = 0; i < numFrames; i++) {
        float normalizedPhase = (inputs[i] * 0.5f) + 0.5f; // from 0.0 to 1.0
        outputs[i] = mMinimum * powf(mRatio, normalizedPhase);
    }
    return numFrames;
}

View File

@ -0,0 +1,70 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_EXPONENTIAL_SHAPE_H
#define OBOETESTER_EXPONENTIAL_SHAPE_H
#include "flowgraph/FlowGraphNode.h"
/**
* Generate a exponential sweep between min and max.
*
* The waveform is not band-limited so it will have aliasing artifacts at higher frequencies.
*/
class ExponentialShape : public oboe::flowgraph::FlowGraphFilter {
public:
ExponentialShape();
int32_t onProcess(int32_t numFrames) override;
float getMinimum() const {
return mMinimum;
}
/**
* The minimum and maximum should not span zero.
* They should both be positive or both negative.
*
* @param minimum
*/
void setMinimum(float minimum) {
mMinimum = minimum;
mRatio = mMaximum / mMinimum;
}
float getMaximum() const {
return mMaximum;
}
/**
* The minimum and maximum should not span zero.
* They should both be positive or both negative.
*
* @param maximum
*/
void setMaximum(float maximum) {
mMaximum = maximum;
mRatio = mMaximum / mMinimum;
}
private:
float mMinimum = 0.0;
float mMaximum = 1.0;
float mRatio = 1.0;
};
#endif //OBOETESTER_EXPONENTIAL_SHAPE_H

View File

@ -0,0 +1,42 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <math.h>
#include <unistd.h>
#include "ImpulseOscillator.h"
// Trivial constructor; ports and sample rate are set up by OscillatorBase().
ImpulseOscillator::ImpulseOscillator()
        : OscillatorBase() {
}
// Emit a single-sample spike equal to the amplitude each time the phase
// wraps past +1; every other sample is zero (the baseline).
int32_t ImpulseOscillator::onProcess(int32_t numFrames) {
    const float *frequencies = frequency.getBuffer();
    const float *amplitudes = amplitude.getBuffer();
    float *buffer = output.getBuffer();
    for (int i = 0; i < numFrames; i++) {
        mPhase += mFrequencyToPhaseIncrement * frequencies[i];
        float value = 0.0f;
        if (mPhase >= 1.0f) {
            value = amplitudes[i]; // spike
            mPhase -= 2.0f;        // wrap back into [-1, +1)
        }
        buffer[i] = value;
    }
    return numFrames;
}

View File

@ -0,0 +1,39 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_IMPULSE_GENERATOR_H
#define NATIVEOBOE_IMPULSE_GENERATOR_H
#include <unistd.h>
#include <sys/types.h>
#include "flowgraph/FlowGraphNode.h"
#include "OscillatorBase.h"
/**
* Generate a raw impulse equal to the amplitude.
* The output baseline is zero.
*
* The waveform is not band-limited so it will have aliasing artifacts at higher frequencies.
*/
// Oscillator that outputs a one-sample spike (equal to the amplitude input)
// once per cycle, with a zero baseline elsewhere.
class ImpulseOscillator : public OscillatorBase {
public:
    ImpulseOscillator();
    int32_t onProcess(int32_t numFrames) override;
};
#endif //NATIVEOBOE_IMPULSE_GENERATOR_H

View File

@ -0,0 +1,36 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "LinearShape.h"
using namespace oboe::flowgraph;
// Construct the filter; the argument to FlowGraphFilter is presumably the
// number of channels (1) — confirm against FlowGraphNode.h.
LinearShape::LinearShape()
        : FlowGraphFilter(1) {
}
// Map each bipolar input sample in [-1, +1] linearly onto [mMinimum, mMaximum].
int32_t LinearShape::onProcess(int numFrames) {
    float *inputs = input.getBuffer();
    float *outputs = output.getBuffer();
    for (int i = 0; i < numFrames; i++) {
        // Normalize the bipolar input to [0.0, 1.0], then interpolate.
        const float t = (inputs[i] * 0.5f) + 0.5f;
        outputs[i] = mMinimum + (t * (mMaximum - mMinimum));
    }
    return numFrames;
}

View File

@ -0,0 +1,53 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_LINEAR_SHAPE_H
#define OBOETESTER_LINEAR_SHAPE_H
#include "flowgraph/FlowGraphNode.h"
/**
* Convert an input between -1.0 and +1.0 to a linear region between min and max.
*/
/**
 * Convert an input between -1.0 and +1.0 to a linear region between
 * the configured minimum and maximum.
 */
class LinearShape : public oboe::flowgraph::FlowGraphFilter {
public:
    LinearShape();

    int32_t onProcess(int numFrames) override;

    /** @return low end of the output range */
    float getMinimum() const { return mMinimum; }

    void setMinimum(float minimum) { mMinimum = minimum; }

    /** @return high end of the output range */
    float getMaximum() const { return mMaximum; }

    void setMaximum(float maximum) { mMaximum = maximum; }

private:
    float mMinimum = 0.0;
    float mMaximum = 1.0;
};
#endif //OBOETESTER_LINEAR_SHAPE_H

View File

@ -0,0 +1,26 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "OscillatorBase.h"
using namespace oboe::flowgraph;
// Wire up the frequency and amplitude input ports and the output port
// (one channel each), then default the sample rate to 48000 Hz.
OscillatorBase::OscillatorBase()
        : frequency(*this, 1)
        , amplitude(*this, 1)
        , output(*this, 1) {
    setSampleRate(48000);
}

View File

@ -0,0 +1,100 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVEOBOE_OSCILLATORBASE_H
#define NATIVEOBOE_OSCILLATORBASE_H
#include "flowgraph/FlowGraphNode.h"
/**
* Base class for various oscillators.
* The oscillator has a phase that ranges from -1.0 to +1.0.
* That makes it easier to implement simple algebraic waveforms.
*
* Subclasses must implement onProcess().
*
* This module has "frequency" and "amplitude" ports for control.
*/
/**
 * Common base for the oscillator nodes in this module.
 * Phase runs from -1.0 to +1.0, which keeps simple algebraic waveforms
 * (sawtooth, triangle) easy to express.
 *
 * Subclasses must implement onProcess().
 *
 * Control comes in through the "frequency" and "amplitude" ports.
 */
class OscillatorBase : public oboe::flowgraph::FlowGraphNode {
public:
    OscillatorBase();

    virtual ~OscillatorBase() = default;

    /**
     * Set the sample rate used to convert frequency into a phase step.
     * One full cycle covers a phase span of 2 (-1 to +1).
     */
    void setSampleRate(float sampleRate) {
        mSampleRate = sampleRate;
        mFrequencyToPhaseIncrement = 2.0f / sampleRate;
    }

    float getSampleRate() {
        return mSampleRate;
    }

    /**
     * Set the starting phase before the oscillator runs (useful for LFOs).
     * Calling this while the oscillator is running will cause sharp pops.
     * @param phase between -1.0 and +1.0
     */
    void setPhase(float phase) {
        mPhase = phase;
    }

    float getPhase() {
        return mPhase;
    }

    /** Frequency control in Hz. */
    oboe::flowgraph::FlowGraphPortFloatInput frequency;

    /**
     * Linear amplitude control.
     * Silence is 0.0; a typical full amplitude is 1.0.
     */
    oboe::flowgraph::FlowGraphPortFloatInput amplitude;

    oboe::flowgraph::FlowGraphPortFloatOutput output;

protected:
    /**
     * Advance the phase by one sample period at the given frequency,
     * wrapping into [-1, +1). Frequency may be positive or negative but
     * should not exceed +/- Nyquist (sampleRate / 2).
     */
    float incrementPhase(float frequency) {
        mPhase += frequency * mFrequencyToPhaseIncrement;
        if (mPhase >= 1.0f) {
            mPhase -= 2.0f;
        } else if (mPhase < -1.0f) {
            mPhase += 2.0f;
        }
        return mPhase;
    }

    float mPhase = 0.0f;                     // current phase in [-1.0, +1.0)
    float mSampleRate = 0.0f;
    float mFrequencyToPhaseIncrement = 0.0f; // converts Hz to phase step per sample
};
#endif //NATIVEOBOE_OSCILLATORBASE_H

View File

@ -0,0 +1,39 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <math.h>
#include <unistd.h>
#include "SawtoothOscillator.h"
// Trivial constructor; ports and sample rate are set up by OscillatorBase().
SawtoothOscillator::SawtoothOscillator()
        : OscillatorBase() {
}
// Render numFrames samples of a naive sawtooth: the phase itself (which
// ramps from -1 to +1) scaled by the amplitude input.
// WARNING: not band-limited, so it aliases at higher frequencies.
int32_t SawtoothOscillator::onProcess(int32_t numFrames) {
    const float *frequencies = frequency.getBuffer();
    const float *amplitudes = amplitude.getBuffer();
    float *buffer = output.getBuffer();
    for (int i = 0; i < numFrames; i++) {
        buffer[i] = incrementPhase(frequencies[i]) * amplitudes[i];
    }
    return numFrames;
}

View File

@ -0,0 +1,36 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_SAWTOOTH_OSCILLATOR_H
#define FLOWGRAPH_SAWTOOTH_OSCILLATOR_H
#include <unistd.h>
#include "OscillatorBase.h"
/**
* Oscillator that generates a sawtooth wave at the specified frequency and amplitude.
*
* The waveform is not band-limited so it will have aliasing artifacts at higher frequencies.
*/
// Oscillator that generates a sawtooth wave at the specified frequency and
// amplitude. Not band-limited, so it aliases at higher frequencies.
class SawtoothOscillator : public OscillatorBase {
public:
    SawtoothOscillator();
    int32_t onProcess(int32_t numFrames) override;
};
#endif //FLOWGRAPH_SAWTOOTH_OSCILLATOR_H

View File

@ -0,0 +1,42 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <math.h>
#include <unistd.h>
#include "SineOscillator.h"
/*
* This calls sinf() so it is not very efficient.
* A more efficient implementation might use a wave-table or a polynomial.
*/
// Trivial constructor; ports and sample rate are set up by OscillatorBase().
SineOscillator::SineOscillator()
        : OscillatorBase() {
}
// Render numFrames samples of a sine wave.
// Calls sinf() per sample, so this favors simplicity over speed.
int32_t SineOscillator::onProcess(int32_t numFrames) {
    const float *frequencies = frequency.getBuffer();
    const float *amplitudes = amplitude.getBuffer();
    float *buffer = output.getBuffer();
    for (int i = 0; i < numFrames; i++) {
        // incrementPhase() yields a phase in [-1, +1); scale by PI for sinf().
        const float phase = incrementPhase(frequencies[i]);
        buffer[i] = sinf(phase * M_PI) * amplitudes[i];
    }
    return numFrames;
}

View File

@ -0,0 +1,34 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_SINE_OSCILLATOR_H
#define FLOWGRAPH_SINE_OSCILLATOR_H
#include <unistd.h>
#include "OscillatorBase.h"
/**
* Oscillator that generates a sine wave at the specified frequency and amplitude.
*/
// Oscillator that generates a sine wave at the specified frequency and
// amplitude (implemented with sinf(), so simple rather than fast).
class SineOscillator : public OscillatorBase {
public:
    SineOscillator();
    int32_t onProcess(int32_t numFrames) override;
};
#endif //FLOWGRAPH_SINE_OSCILLATOR_H

View File

@ -0,0 +1,40 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <math.h>
#include <unistd.h>
#include "TriangleOscillator.h"
// Trivial constructor; ports and sample rate are set up by OscillatorBase().
TriangleOscillator::TriangleOscillator()
        : OscillatorBase() {
}
// Render numFrames samples of a naive triangle wave derived directly from
// the phase: rising while phase is negative, falling while it is positive.
// WARNING: not band-limited, so it aliases at higher frequencies.
int32_t TriangleOscillator::onProcess(int32_t numFrames) {
    const float *frequencies = frequency.getBuffer();
    const float *amplitudes = amplitude.getBuffer();
    float *buffer = output.getBuffer();
    for (int i = 0; i < numFrames; i++) {
        const float phase = incrementPhase(frequencies[i]); // in [-1, +1)
        const float unscaled = (phase < 0.0f) ? (0.5f + phase) : (0.5f - phase);
        buffer[i] = 2.0f * unscaled * amplitudes[i];
    }
    return numFrames;
}

View File

@ -0,0 +1,39 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLOWGRAPH_TRIANGLE_OSCILLATOR_H
#define FLOWGRAPH_TRIANGLE_OSCILLATOR_H
#include <unistd.h>
#include "OscillatorBase.h"
/**
 * Oscillator that generates a triangle wave at the specified frequency and amplitude.
 *
 * The triangle output rises from -1 to +1 when the phase is between -1 and 0.
 * The triangle output falls from +1 to -1 when the phase is between 0 and +1.
 *
 * The waveform is not band-limited so it will have aliasing artifacts at higher frequencies.
 */
class TriangleOscillator : public OscillatorBase {
public:
    TriangleOscillator();
    // Render numFrames triangle samples into the output port. Returns numFrames.
    int32_t onProcess(int32_t numFrames) override;
};
#endif //FLOWGRAPH_TRIANGLE_OSCILLATOR_H

View File

@ -0,0 +1,700 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define MODULE_NAME "OboeTester"
#include <cassert>
#include <cstring>
#include <jni.h>
#include <stdint.h>
#include <thread>
#include "common/OboeDebug.h"
#include "oboe/Oboe.h"
#include "NativeAudioContext.h"
NativeAudioContext engine;
/*********************************************************************************/
/********************** JNI Prototypes *****************************************/
/*********************************************************************************/
extern "C" {
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_openNative(JNIEnv *env, jobject,
jint nativeApi,
jint sampleRate,
jint channelCount,
jint format,
jint sharingMode,
jint performanceMode,
jint inputPreset,
jint usage,
jint deviceId,
jint sessionId,
jint framesPerBurst,
jboolean channelConversionAllowed,
jboolean formatConversionAllowed,
jint rateConversionQuality,
jboolean isMMap,
jboolean isInput);
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_close(JNIEnv *env, jobject, jint);
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_setThresholdInFrames(JNIEnv *env, jobject, jint, jint);
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getThresholdInFrames(JNIEnv *env, jobject, jint);
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getBufferCapacityInFrames(JNIEnv *env, jobject, jint);
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_setNativeApi(JNIEnv *env, jobject, jint, jint);
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_setUseCallback(JNIEnv *env, jclass type,
jboolean useCallback);
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_setCallbackReturnStop(JNIEnv *env,
jclass type,
jboolean b);
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_setCallbackSize(JNIEnv *env, jclass type,
jint callbackSize);
// ================= OboeAudioOutputStream ================================
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioOutputStream_trigger(JNIEnv *env, jobject);
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioOutputStream_setToneType(JNIEnv *env, jobject, jint);
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioOutputStream_setAmplitude(JNIEnv *env, jobject, jdouble);
/*********************************************************************************/
/********************** JNI Implementations *************************************/
/*********************************************************************************/
// ---- NativeEngine: global capability queries and workaround toggles ----
// Returns true if this device supports AAudio MMAP data paths.
JNIEXPORT jboolean JNICALL
Java_com_mobileer_oboetester_NativeEngine_isMMapSupported(JNIEnv *env, jclass type) {
    return oboe::AAudioExtensions::getInstance().isMMapSupported();
}
// Returns true if this device supports EXCLUSIVE-mode AAudio MMAP streams.
JNIEXPORT jboolean JNICALL
Java_com_mobileer_oboetester_NativeEngine_isMMapExclusiveSupported(JNIEnv *env, jclass type) {
    return oboe::AAudioExtensions::getInstance().isMMapExclusiveSupported();
}
// Globally enable or disable Oboe's device-specific workarounds (quirks).
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_NativeEngine_setWorkaroundsEnabled(JNIEnv *env, jclass type,
                                                                jboolean enabled) {
    oboe::OboeGlobals::setWorkaroundsEnabled(enabled);
}
// Query the current global workaround setting.
JNIEXPORT jboolean JNICALL
Java_com_mobileer_oboetester_NativeEngine_areWorkaroundsEnabled(JNIEnv *env,
                                                                jclass type) {
    return oboe::OboeGlobals::areWorkaroundsEnabled();
}
// Open a stream on the current activity with the requested configuration.
// All enum-like parameters arrive as raw jints from the Java side; the
// activity's open() is responsible for interpreting them.
// Returns the result of open() cast to jint (presumably an oboe::Result
// error code on failure — confirm against ActivityContext::open()).
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_openNative(
        JNIEnv *env, jobject synth,
        jint nativeApi,
        jint sampleRate,
        jint channelCount,
        jint format,
        jint sharingMode,
        jint performanceMode,
        jint inputPreset,
        jint usage,
        jint deviceId,
        jint sessionId,
        jint framesPerBurst,
        jboolean channelConversionAllowed,
        jboolean formatConversionAllowed,
        jint rateConversionQuality,
        jboolean isMMap,
        jboolean isInput) {
    LOGD("OboeAudioStream_openNative: sampleRate = %d, framesPerBurst = %d", sampleRate, framesPerBurst);
    return (jint) engine.getCurrentActivity()->open(nativeApi,
                                                    sampleRate,
                                                    channelCount,
                                                    format,
                                                    sharingMode,
                                                    performanceMode,
                                                    inputPreset,
                                                    usage,
                                                    deviceId,
                                                    sessionId,
                                                    framesPerBurst,
                                                    channelConversionAllowed,
                                                    formatConversionAllowed,
                                                    rateConversionQuality,
                                                    isMMap,
                                                    isInput);
}
// ---- Transport controls: all delegate to the current activity ----
// Start the current activity's stream(s).
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_TestAudioActivity_startNative(JNIEnv *env, jobject) {
    return (jint) engine.getCurrentActivity()->start();
}
// Pause the current activity's stream(s).
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_TestAudioActivity_pauseNative(JNIEnv *env, jobject) {
    return (jint) engine.getCurrentActivity()->pause();
}
// Stop the current activity's stream(s).
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_TestAudioActivity_stopNative(JNIEnv *env, jobject) {
    return (jint) engine.getCurrentActivity()->stop();
}
// Number of frames delivered per data callback.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_TestAudioActivity_getFramesPerCallback(JNIEnv *env, jobject) {
    return (jint) engine.getCurrentActivity()->getFramesPerCallback();
}
// Start playback only (e.g. for record-then-play activities).
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_startPlaybackNative(JNIEnv *env, jobject) {
    return (jint) engine.getCurrentActivity()->startPlayback();
}
// Close the stream at the given index on the current activity.
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_close(JNIEnv *env, jobject, jint streamIndex) {
    engine.getCurrentActivity()->close(streamIndex);
}
// Set the buffer size (in frames) of the stream at streamIndex.
// Returns the resulting buffer size on success, the oboe error code if the
// call fails, or (jint) oboe::Result::ErrorNull when no stream exists there.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_setBufferSizeInFrames(
        JNIEnv *env, jobject, jint streamIndex, jint threshold) {
    auto oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream == nullptr) {
        return (jint) oboe::Result::ErrorNull;
    }
    auto result = oboeStream->setBufferSizeInFrames(threshold);
    if (result) {
        return (jint) result.value();
    }
    return (jint) result.error();
}
// Current buffer size in frames of the stream at streamIndex, or
// (jint) oboe::Result::ErrorNull if there is no stream at that index.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getBufferSizeInFrames(
        JNIEnv *env, jobject, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = oboeStream->getBufferSizeInFrames();
    }
    return result;
}
// Maximum buffer capacity in frames of the stream at streamIndex, or
// (jint) oboe::Result::ErrorNull if there is no stream at that index.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getBufferCapacityInFrames(
        JNIEnv *env, jobject, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = oboeStream->getBufferCapacityInFrames();
    }
    return result;
}
// Map an oboe::AudioApi value onto the NATIVE_MODE_* constants shared with
// the Java side. Returns -1 for any unrecognized value.
static int convertAudioApiToNativeApi(oboe::AudioApi audioApi) {
    if (audioApi == oboe::AudioApi::Unspecified) {
        return NATIVE_MODE_UNSPECIFIED;
    } else if (audioApi == oboe::AudioApi::OpenSLES) {
        return NATIVE_MODE_OPENSLES;
    } else if (audioApi == oboe::AudioApi::AAudio) {
        return NATIVE_MODE_AAUDIO;
    } else {
        return -1;
    }
}
// ---- Per-stream property getters ----
// Each getter looks up the stream at streamIndex on the current activity and
// returns the requested property. When no stream exists at that index, the
// sentinel (jint) oboe::Result::ErrorNull is returned instead.
// Actual audio API of the stream, mapped to a NATIVE_MODE_* constant.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getNativeApi(
        JNIEnv *env, jobject, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        oboe::AudioApi audioApi = oboeStream->getAudioApi();
        result = convertAudioApiToNativeApi(audioApi);
        LOGD("OboeAudioStream_getNativeApi got %d", result);
    }
    return result;
}
// Sample rate of the stream.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getSampleRate(
        JNIEnv *env, jobject, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = oboeStream->getSampleRate();
    }
    return result;
}
// Sharing mode (oboe::SharingMode) as a raw int.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getSharingMode(
        JNIEnv *env, jobject, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = (jint) oboeStream->getSharingMode();
    }
    return result;
}
// Performance mode (oboe::PerformanceMode) as a raw int.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getPerformanceMode(
        JNIEnv *env, jobject, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = (jint) oboeStream->getPerformanceMode();
    }
    return result;
}
// Input preset (oboe::InputPreset) as a raw int.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getInputPreset(
        JNIEnv *env, jobject, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = (jint) oboeStream->getInputPreset();
    }
    return result;
}
// Frames per hardware burst.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getFramesPerBurst(
        JNIEnv *env, jobject, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = oboeStream->getFramesPerBurst();
    }
    return result;
}
// Channel count of the stream.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getChannelCount(
        JNIEnv *env, jobject, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = oboeStream->getChannelCount();
    }
    return result;
}
// Sample format (oboe::AudioFormat) as a raw int.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getFormat(JNIEnv *env, jobject instance, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = (jint) oboeStream->getFormat();
    }
    return result;
}
// Usage attribute (oboe::Usage) as a raw int.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getUsage(JNIEnv *env, jobject instance, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = (jint) oboeStream->getUsage();
    }
    return result;
}
// Actual device id the stream is attached to.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getDeviceId(
        JNIEnv *env, jobject, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = oboeStream->getDeviceId();
    }
    return result;
}
// Audio session id of the stream (for attaching Java-side effects).
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getSessionId(
        JNIEnv *env, jobject, jint streamIndex) {
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = oboeStream->getSessionId();
    }
    return result;
}
// Total frames written to the stream at streamIndex, or
// (jlong) oboe::Result::ErrorNull if there is no stream at that index.
JNIEXPORT jlong JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getFramesWritten(
        JNIEnv *env, jobject, jint streamIndex) {
    // Fix: the sentinel was previously cast to jint even though this function
    // returns jlong; cast to jlong for correctness/consistency with
    // getFramesRead(). (The numeric value is unchanged.)
    jlong result = (jlong) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = oboeStream->getFramesWritten();
    }
    return result;
}
// Total frames read from the stream at streamIndex, or
// (jlong) oboe::Result::ErrorNull if there is no stream at that index.
JNIEXPORT jlong JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getFramesRead(
        JNIEnv *env, jobject, jint streamIndex) {
    jlong result = (jlong) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        result = oboeStream->getFramesRead();
    }
    return result;
}
// Underrun/overrun count of the stream at streamIndex. Returns the oboe
// error code if the query fails, or (jint) oboe::Result::ErrorNull when no
// stream exists at that index.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getXRunCount(
        JNIEnv *env, jobject, jint streamIndex) {
    // Fix: the sentinel was previously cast to jlong and then narrowed into a
    // jint; cast directly to jint, matching the function's return type.
    jint result = (jint) oboe::Result::ErrorNull;
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        auto oboeResult = oboeStream->getXRunCount();
        if (!oboeResult) {
            result = (jint) oboeResult.error();
        } else {
            result = oboeResult.value();
        }
    }
    return result;
}
// Number of data callbacks delivered to the current activity so far.
JNIEXPORT jlong JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getCallbackCount(
        JNIEnv *env, jobject) {
    return engine.getCurrentActivity()->getCallbackCount();
}
// Result delivered to the most recent error callback, or 0 if no stream
// exists at streamIndex.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getLastErrorCallbackResult(
        JNIEnv *env, jobject, jint streamIndex) {
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        return (jint) oboeStream->getLastErrorCallbackResult();
    }
    return 0;
}
// Timestamp-based latency estimate for the stream at streamIndex.
JNIEXPORT jdouble JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getTimestampLatency(JNIEnv *env,
                                                                 jobject instance,
                                                                 jint streamIndex) {
    return engine.getCurrentActivity()->getTimestampLatency(streamIndex);
}
// CPU load of the current activity.
// NOTE(review): streamIndex is accepted but ignored — the load is
// activity-wide, not per-stream; confirm that this matches the Java caller.
JNIEXPORT jdouble JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getCpuLoad(JNIEnv *env, jobject instance, jint streamIndex) {
    return engine.getCurrentActivity()->getCpuLoad();
}
// Set the synthetic workload used for CPU-load experiments.
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_setWorkload(
        JNIEnv *env, jobject, jdouble workload) {
    engine.getCurrentActivity()->setWorkload(workload);
}
// Current stream state, refreshed with a zero-timeout poll.
// Returns the oboe::StreamState as a jint, or -1 if there is no stream at
// streamIndex.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getState(JNIEnv *env, jobject instance, jint streamIndex) {
    std::shared_ptr<oboe::AudioStream> oboeStream = engine.getCurrentActivity()->getStream(streamIndex);
    if (oboeStream != nullptr) {
        auto state = oboeStream->getState();
        if (state != oboe::StreamState::Starting && state != oboe::StreamState::Started) {
            // Zero timeout: this is a poll that lets the stream update its
            // cached state rather than an actual wait.
            oboe::Result result = oboeStream->waitForStateChange(
                    oboe::StreamState::Uninitialized,
                    &state, 0);
            if (result != oboe::Result::OK){
                // Map terminal errors onto the matching pseudo-states so the
                // Java UI can display them.
                if (result == oboe::Result::ErrorClosed) {
                    state = oboe::StreamState::Closed;
                } else if (result == oboe::Result::ErrorDisconnected){
                    state = oboe::StreamState::Disconnected;
                } else {
                    state = oboe::StreamState::Unknown;
                }
            }
        }
        return (jint) state;
    }
    return -1;
}
// Peak input level for the given channel index.
JNIEXPORT jdouble JNICALL
Java_com_mobileer_oboetester_AudioInputTester_getPeakLevel(JNIEnv *env,
                                                           jobject instance,
                                                           jint index) {
    return engine.getCurrentActivity()->getPeakLevel(index);
}
// Choose between callback-driven and blocking read/write I/O (global flag).
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_setUseCallback(JNIEnv *env, jclass type,
                                                            jboolean useCallback) {
    ActivityContext::mUseCallback = useCallback;
}
// Make data callbacks return Stop (used to test callback-stop behavior).
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_setCallbackReturnStop(JNIEnv *env, jclass type,
                                                                   jboolean b) {
    OboeStreamCallbackProxy::setCallbackReturnStop(b);
}
// Set the number of frames per callback (global setting).
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_setCallbackSize(JNIEnv *env, jclass type,
                                                             jint callbackSize) {
    ActivityContext::callbackSize = callbackSize;
}
// Returns true if the stream at streamIndex is actually using MMAP.
JNIEXPORT jboolean JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_isMMap(JNIEnv *env, jobject instance, jint streamIndex) {
    return engine.getCurrentActivity()->isMMapUsed(streamIndex);
}
// ================= OboeAudioOutputStream ================================
// Trigger a one-shot event (e.g. a test tone) on the current activity.
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioOutputStream_trigger(
        JNIEnv *env, jobject) {
    engine.getCurrentActivity()->trigger();
}
// Enable or mute an individual output channel.
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioOutputStream_setChannelEnabled(
        JNIEnv *env, jobject, jint channelIndex, jboolean enabled) {
    engine.getCurrentActivity()->setChannelEnabled(channelIndex, enabled);
}
// Select the generated signal type (interpretation is up to the activity).
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_OboeAudioOutputStream_setSignalType(
        JNIEnv *env, jobject, jint signalType) {
    engine.getCurrentActivity()->setSignalType(signalType);
}
// Compile-time Oboe library version number.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_OboeAudioStream_getOboeVersionNumber(JNIEnv *env,
                                                                  jclass type) {
    return OBOE_VERSION_NUMBER;
}
// ==========================================================================
// Select which ActivityContext subclass handles subsequent JNI calls.
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_TestAudioActivity_setActivityType(JNIEnv *env,
                                                               jobject instance,
                                                               jint activityType) {
    engine.setActivityType(activityType);
}
// ==========================================================================
// Save captured audio to a WAV file at the given path.
// Returns the activity's saveWaveFile() result.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_TestInputActivity_saveWaveFile(JNIEnv *env,
                                                            jobject instance,
                                                            jstring fileName) {
    const char *str = env->GetStringUTFChars(fileName, nullptr);
    LOGD("nativeSaveFile(%s)", str);
    jint result = engine.getCurrentActivity()->saveWaveFile(str);
    // Release the UTF chars obtained above to avoid leaking the JNI copy.
    env->ReleaseStringUTFChars(fileName, str);
    return result;
}
// ==========================================================================
// Minimum frames that must be available before a blocking read proceeds.
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_TestInputActivity_setMinimumFramesBeforeRead(JNIEnv *env,
                                                                          jobject instance,
                                                                          jint numFrames) {
    engine.getCurrentActivity()->setMinimumFramesBeforeRead(numFrames);
}
// ==========================================================================
// Set the echo delay used by the Echo activity, in seconds.
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_EchoActivity_setDelayTime(JNIEnv *env,
                                                       jobject instance,
                                                       jdouble delayTimeSeconds) {
    engine.setDelayTime(delayTimeSeconds);
}
// Cold-start latency of the input side, in milliseconds.
// NOTE(review): declared as plain "int" rather than jint — equivalent on
// Android but inconsistent with the rest of this file.
JNIEXPORT int JNICALL
Java_com_mobileer_oboetester_EchoActivity_getColdStartInputMillis(JNIEnv *env,
                                                                  jobject instance) {
    return engine.getCurrentActivity()->getColdStartInputMillis();
}
// Cold-start latency of the output side, in milliseconds.
JNIEXPORT int JNICALL
Java_com_mobileer_oboetester_EchoActivity_getColdStartOutputMillis(JNIEnv *env,
                                                                   jobject instance) {
    return engine.getCurrentActivity()->getColdStartOutputMillis();
}
// ==========================================================================
// ---- Round-trip latency measurement: read-only views of the analyzer ----
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_RoundTripLatencyActivity_getAnalyzerProgress(JNIEnv *env,
                                                                          jobject instance) {
    return engine.mActivityRoundTripLatency.getLatencyAnalyzer()->getProgress();
}
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_RoundTripLatencyActivity_getMeasuredLatency(JNIEnv *env,
                                                                         jobject instance) {
    return engine.mActivityRoundTripLatency.getLatencyAnalyzer()->getMeasuredLatency();
}
JNIEXPORT jdouble JNICALL
Java_com_mobileer_oboetester_RoundTripLatencyActivity_getMeasuredConfidence(JNIEnv *env,
                                                                            jobject instance) {
    return engine.mActivityRoundTripLatency.getLatencyAnalyzer()->getMeasuredConfidence();
}
JNIEXPORT jdouble JNICALL
Java_com_mobileer_oboetester_RoundTripLatencyActivity_getBackgroundRMS(JNIEnv *env,
                                                                       jobject instance) {
    return engine.mActivityRoundTripLatency.getLatencyAnalyzer()->getBackgroundRMS();
}
JNIEXPORT jdouble JNICALL
Java_com_mobileer_oboetester_RoundTripLatencyActivity_getSignalRMS(JNIEnv *env,
                                                                   jobject instance) {
    return engine.mActivityRoundTripLatency.getLatencyAnalyzer()->getSignalRMS();
}
// NOTE(review): this AnalyzerActivity entry point reads the round-trip
// latency analyzer specifically — confirm that is intended for all
// AnalyzerActivity subclasses.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_AnalyzerActivity_getMeasuredResult(JNIEnv *env,
                                                                jobject instance) {
    return engine.mActivityRoundTripLatency.getLatencyAnalyzer()->getResult();
}
// ==========================================================================
// The three calls below downcast the current activity to ActivityFullDuplex
// without a check; they assume a full-duplex activity is active — confirm
// the Java side only calls them in that state.
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_AnalyzerActivity_getAnalyzerState(JNIEnv *env,
                                                               jobject instance) {
    return ((ActivityFullDuplex *)engine.getCurrentActivity())->getState();
}
JNIEXPORT jboolean JNICALL
Java_com_mobileer_oboetester_AnalyzerActivity_isAnalyzerDone(JNIEnv *env,
                                                             jobject instance) {
    return ((ActivityFullDuplex *)engine.getCurrentActivity())->isAnalyzerDone();
}
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_AnalyzerActivity_getResetCount(JNIEnv *env,
                                                            jobject instance) {
    return ((ActivityFullDuplex *)engine.getCurrentActivity())->getResetCount();
}
// ==========================================================================
// ---- Glitch / data-path analyzers: read-only views and configuration ----
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_GlitchActivity_getGlitchCount(JNIEnv *env,
                                                           jobject instance) {
    return engine.mActivityGlitches.getGlitchAnalyzer()->getGlitchCount();
}
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_GlitchActivity_getStateFrameCount(JNIEnv *env,
                                                               jobject instance,
                                                               jint state) {
    return engine.mActivityGlitches.getGlitchAnalyzer()->getStateFrameCount(state);
}
JNIEXPORT jdouble JNICALL
Java_com_mobileer_oboetester_GlitchActivity_getSignalToNoiseDB(JNIEnv *env,
                                                               jobject instance) {
    return engine.mActivityGlitches.getGlitchAnalyzer()->getSignalToNoiseDB();
}
JNIEXPORT jdouble JNICALL
Java_com_mobileer_oboetester_GlitchActivity_getPeakAmplitude(JNIEnv *env,
                                                             jobject instance) {
    return engine.mActivityGlitches.getGlitchAnalyzer()->getPeakAmplitude();
}
JNIEXPORT jdouble JNICALL
Java_com_mobileer_oboetester_TestDataPathsActivity_getMagnitude(JNIEnv *env,
                                                                jobject instance) {
    return engine.mActivityDataPath.getDataPathAnalyzer()->getMagnitude();
}
JNIEXPORT jdouble JNICALL
Java_com_mobileer_oboetester_TestDataPathsActivity_getMaxMagnitude(JNIEnv *env,
                                                                   jobject instance) {
    return engine.mActivityDataPath.getDataPathAnalyzer()->getMaxMagnitude();
}
JNIEXPORT jdouble JNICALL
Java_com_mobileer_oboetester_TestDataPathsActivity_getPhase(JNIEnv *env,
                                                            jobject instance) {
    return engine.mActivityDataPath.getDataPathAnalyzer()->getPhaseOffset();
}
// Set the glitch-detection tolerance; ignored if no analyzer exists yet.
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_GlitchActivity_setTolerance(JNIEnv *env,
                                                         jobject instance,
                                                         jfloat tolerance) {
    if (engine.mActivityGlitches.getGlitchAnalyzer()) {
        engine.mActivityGlitches.getGlitchAnalyzer()->setTolerance(tolerance);
    }
}
// Select the input channel for BOTH the glitch and data-path analyzers,
// whichever of them currently exist.
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_GlitchActivity_setInputChannelNative(JNIEnv *env,
                                                                  jobject instance,
                                                                  jint channel) {
    if (engine.mActivityGlitches.getGlitchAnalyzer()) {
        engine.mActivityGlitches.getGlitchAnalyzer()->setInputChannel(channel);
    }
    if (engine.mActivityDataPath.getDataPathAnalyzer()) {
        engine.mActivityDataPath.getDataPathAnalyzer()->setInputChannel(channel);
    }
}
// Select the output channel for BOTH analyzers, whichever currently exist.
JNIEXPORT void JNICALL
Java_com_mobileer_oboetester_GlitchActivity_setOutputChannelNative(JNIEnv *env,
                                                                   jobject instance,
                                                                   jint channel) {
    if (engine.mActivityGlitches.getGlitchAnalyzer()) {
        engine.mActivityGlitches.getGlitchAnalyzer()->setOutputChannel(channel);
    }
    if (engine.mActivityDataPath.getDataPathAnalyzer()) {
        engine.mActivityDataPath.getDataPathAnalyzer()->setOutputChannel(channel);
    }
}
// Copy the most recent glitch waveform into the caller's float array.
// Returns the number of samples written (0 if no analyzer exists).
JNIEXPORT jint JNICALL
Java_com_mobileer_oboetester_ManualGlitchActivity_getGlitch(JNIEnv *env, jobject instance,
                                                            jfloatArray waveform_) {
    // Pin (or copy) the Java array for direct access.
    float *waveform = env->GetFloatArrayElements(waveform_, nullptr);
    jsize length = env->GetArrayLength(waveform_);
    jsize numSamples = 0;
    auto *analyzer = engine.mActivityGlitches.getGlitchAnalyzer();
    if (analyzer) {
        numSamples = analyzer->getLastGlitch(waveform, length);
    }
    // Mode 0: copy data back to the Java array and release the native copy.
    env->ReleaseFloatArrayElements(waveform_, waveform, 0);
    return numSamples;
}
}

View File

@ -0,0 +1,145 @@
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OBOETESTER_UNUSED_H
#define OBOETESTER_UNUSED_H
// Store this code for later use.
#if 0
/*
FIR filter designed with
http://t-filter.appspot.com
sampling frequency: 48000 Hz
* 0 Hz - 8000 Hz
gain = 1.2
desired ripple = 5 dB
actual ripple = 5.595266169703693 dB
* 12000 Hz - 20000 Hz
gain = 0
desired attenuation = -40 dB
actual attenuation = -37.58691566571914 dB
*/
#define FILTER_TAP_NUM 11
static const float sFilterTaps8000[FILTER_TAP_NUM] = {
-0.05944219353343189f,
-0.07303434839503208f,
-0.037690487672689066f,
0.1870480506596512f,
0.3910337357836833f,
0.5333672385425637f,
0.3910337357836833f,
0.1870480506596512f,
-0.037690487672689066f,
-0.07303434839503208f,
-0.05944219353343189f
};
class LowPassFilter {
public:
/*
* Filter one input sample.
* @return filtered output
*/
float filter(float input) {
float output = 0.0f;
mX[mCursor] = input;
// Index backwards over x.
int xIndex = mCursor + FILTER_TAP_NUM;
// Write twice so we avoid having to wrap in the middle of the convolution.
mX[xIndex] = input;
for (int i = 0; i < FILTER_TAP_NUM; i++) {
output += sFilterTaps8000[i] * mX[xIndex--];
}
if (++mCursor >= FILTER_TAP_NUM) {
mCursor = 0;
}
return output;
}
/**
* @return true if PASSED
*/
bool test() {
// Measure the impulse of the filter at different phases so we exercise
// all the wraparound cases in the FIR.
for (int offset = 0; offset < (FILTER_TAP_NUM * 2); offset++ ) {
// LOGD("LowPassFilter: cursor = %d\n", mCursor);
// Offset by one each time.
if (filter(0.0f) != 0.0f) {
LOGD("ERROR: filter should return 0.0 before impulse response\n");
return false;
}
for (int i = 0; i < FILTER_TAP_NUM; i++) {
float output = filter((i == 0) ? 1.0f : 0.0f); // impulse
if (output != sFilterTaps8000[i]) {
LOGD("ERROR: filter should return impulse response\n");
return false;
}
}
for (int i = 0; i < FILTER_TAP_NUM; i++) {
if (filter(0.0f) != 0.0f) {
LOGD("ERROR: filter should return 0.0 after impulse response\n");
return false;
}
}
}
return true;
}
private:
float mX[FILTER_TAP_NUM * 2]{}; // twice as big as needed to avoid wrapping
int32_t mCursor = 0;
};
/**
* Low pass filter the recording using a simple FIR filter.
* Note that the lowpass filter cutoff tracks the sample rate.
* That is OK because the impulse width is a fixed number of samples.
*/
void lowPassFilter() {
for (int i = 0; i < mFrameCounter; i++) {
mData[i] = mLowPassFilter.filter(mData[i]);
}
}
/**
* Remove DC offset using a one-pole one-zero IIR filter.
*/
void dcBlocker() {
const float R = 0.996; // narrow notch at zero Hz
float x1 = 0.0;
float y1 = 0.0;
for (int i = 0; i < mFrameCounter; i++) {
const float x = mData[i];
const float y = x - x1 + (R * y1);
mData[i] = y;
y1 = y;
x1 = x;
}
}
#endif
#endif //OBOETESTER_UNUSED_H

View File

@ -0,0 +1,132 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "WaveFileWriter.h"
// Write a single sample to the output stream, emitting the WAV header first
// if it has not been written yet. The sample is encoded as 24-bit or 16-bit
// little-endian PCM depending on the configured bitsPerSample.
// Fix: the definition was redundantly qualified as
// WaveFileWriter::WaveFileWriter::write (legal via the injected class name,
// but inconsistent with every other member definition in this file).
void WaveFileWriter::write(float value) {
    if (!headerWritten) {
        writeHeader();
    }
    if (bitsPerSample == 24) {
        writePCM24(value);
    } else {
        writePCM16(value);
    }
}
// Write numSamples values from buffer, starting at index startSample,
// delegating each value to the single-sample write().
void WaveFileWriter::write(float *buffer, int32_t startSample, int32_t numSamples) {
    const int32_t endSample = startSample + numSamples;
    for (int32_t index = startSample; index < endSample; index++) {
        write(buffer[index]);
    }
}
// Emit a 32-bit integer to the stream in little-endian byte order
// (low byte first); writeByte() keeps only the low 8 bits of each value.
void WaveFileWriter::writeIntLittle(int32_t n) {
    for (int shift = 0; shift < 32; shift += 8) {
        writeByte(n >> shift);
    }
}
// Emit a 16-bit integer to the stream in little-endian byte order.
void WaveFileWriter::writeShortLittle(int16_t n) {
    writeByte((uint8_t) n);        // low byte first
    writeByte((uint8_t) (n >> 8)); // then high byte
}
// Emit the WAV "fmt " chunk describing PCM format, channel count,
// frame rate, byte rate, block alignment and bits per sample.
void WaveFileWriter::writeFormatChunk() {
    // Round bits up to whole bytes (e.g. 24 bits -> 3 bytes).
    int32_t bytesPerSample = (bitsPerSample + 7) / 8;
    writeByte('f');
    writeByte('m');
    writeByte('t');
    writeByte(' ');
    writeIntLittle(16); // chunk size
    writeShortLittle(WAVE_FORMAT_PCM);
    writeShortLittle((int16_t) mSamplesPerFrame);
    writeIntLittle(mFrameRate);
    // bytes/second
    writeIntLittle(mFrameRate * mSamplesPerFrame * bytesPerSample);
    // block align
    writeShortLittle((int16_t) (mSamplesPerFrame * bytesPerSample));
    writeShortLittle((int16_t) bitsPerSample);
}
// Emit the "data" chunk header. The declared length is a placeholder
// because the stream is written incrementally and the final size is unknown.
void WaveFileWriter::writeDataChunkHeader() {
    writeByte('d');
    writeByte('a');
    writeByte('t');
    writeByte('a');
    // Maximum size is not strictly correct but is commonly used
    // when we do not know the final size.
    writeIntLittle(INT32_MAX);
}
// Emit the full WAV header (RIFF container, fmt chunk, data chunk header)
// and remember that it has been written so it is only emitted once.
void WaveFileWriter::writeHeader() {
    writeRiffHeader();
    writeFormatChunk();
    writeDataChunkHeader();
    headerWritten = true;
}
// Write lower 8 bits. Upper bits ignored.
// Forwards one byte to the output stream and counts bytes written.
void WaveFileWriter::writeByte(uint8_t b) {
    mOutputStream->write(b);
    bytesWritten += 1;
}
// Convert a float sample (nominally -1.0..+1.0) to clipped 24-bit PCM and
// emit it as three little-endian bytes.
void WaveFileWriter::writePCM24(float value) {
    // Offset before casting so that we can avoid using floor().
    // Also round by adding 0.5 so that very small signals go to zero.
    // Subtracting PCM24_MIN (negative) keeps temp positive, so the
    // truncating int cast behaves like floor() for the whole range.
    float temp = (PCM24_MAX * value) + 0.5 - PCM24_MIN;
    int32_t sample = ((int) temp) + PCM24_MIN;
    // clip to 24-bit range
    if (sample > PCM24_MAX) {
        sample = PCM24_MAX;
    } else if (sample < PCM24_MIN) {
        sample = PCM24_MIN;
    }
    // encode as little-endian
    writeByte(sample); // little end
    writeByte(sample >> 8); // middle
    writeByte(sample >> 16); // big end
}
// Convert a float sample (nominally -1.0..+1.0) to clipped 16-bit PCM and
// emit it as two little-endian bytes.
void WaveFileWriter::writePCM16(float value) {
    // Offset before casting so that we can avoid using floor().
    // Also round by adding 0.5 so that very small signals go to zero.
    // Subtracting INT16_MIN (negative) keeps temp positive, so the
    // truncating int cast behaves like floor() for the whole range.
    float temp = (INT16_MAX * value) + 0.5 - INT16_MIN;
    int32_t sample = ((int) temp) + INT16_MIN;
    if (sample > INT16_MAX) {
        sample = INT16_MAX;
    } else if (sample < INT16_MIN) {
        sample = INT16_MIN;
    }
    writeByte(sample); // little end
    writeByte(sample >> 8); // big end
}
// Emit the RIFF container header: "RIFF", a placeholder length, then "WAVE".
void WaveFileWriter::writeRiffHeader() {
    static const char riffTag[4] = {'R', 'I', 'F', 'F'};
    for (char c : riffTag) {
        writeByte(c);
    }
    // Maximum size is not strictly correct but is commonly used
    // when we do not know the final size.
    writeIntLittle(INT32_MAX);
    static const char waveTag[4] = {'W', 'A', 'V', 'E'};
    for (char c : waveTag) {
        writeByte(c);
    }
}

View File

@ -0,0 +1,155 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Based on the WaveFileWriter in Java from the open source JSyn library by Phil Burk
// https://github.com/philburk/jsyn/blob/master/src/com/jsyn/util/WaveFileWriter.java
#ifndef UTIL_WAVE_FILE_WRITER
#define UTIL_WAVE_FILE_WRITER
#include <cassert>
#include <cstdint>  // uint8_t, int32_t, INT16_MAX used below
#include <stdio.h>
// Abstract byte sink for WaveFileWriter. Implement write() to receive the
// WAV file image one byte at a time (e.g. buffer it or write it to disk).
class WaveFileOutputStream {
public:
    virtual ~WaveFileOutputStream() = default;
    // Receive one byte of the WAV image.
    virtual void write(uint8_t b) = 0;
};
/**
 * Write audio data to a WAV file.
 *
 * <pre>
 * <code>
 * WaveFileWriter writer(&waveFileOutputStream);
 * writer.setFrameRate(48000);
 * writer.setBitsPerSample(24);
 * writer.write(floatArray, 0, numSamples);
 * writer.close();
 * </code>
 * </pre>
 *
 */
class WaveFileWriter {
public:
    /**
     * Create an object that will write a WAV file image to the specified stream.
     *
     * @param outputStream stream to receive the bytes; not owned and must
     *        outlive this writer
     */
    WaveFileWriter(WaveFileOutputStream *outputStream) {
        mOutputStream = outputStream;
    }

    /**
     * Set the frame rate (sample rate) recorded in the WAV header.
     * @param frameRate default is 48000
     */
    void setFrameRate(int32_t frameRate) {
        mFrameRate = frameRate;
    }

    int32_t getFrameRate() const {
        return mFrameRate;
    }

    /**
     * Set the number of samples per frame, also known as the channel count.
     * For stereo, set this to 2. Default is mono = 1.
     */
    void setSamplesPerFrame(int32_t samplesPerFrame) {
        mSamplesPerFrame = samplesPerFrame;
    }

    int32_t getSamplesPerFrame() const {
        return mSamplesPerFrame;
    }

    /** Only 16 or 24 bit samples supported at the moment. Default is 16. */
    void setBitsPerSample(int32_t bits) {
        assert((bits == 16) || (bits == 24));
        bitsPerSample = bits;
    }

    int32_t getBitsPerSample() const {
        return bitsPerSample;
    }

    // Nothing is buffered by this class so there is nothing to flush,
    // and the output stream itself is not closed here.
    void close() {
    }

    /** Write single audio data value to the WAV file. */
    void write(float value);

    /**
     * Write a buffer to the WAV file.
     * Samples are nominally in [-1.0, +1.0]; out-of-range values are
     * clipped by the PCM encoders (writePCM16/writePCM24).
     * @param buffer source samples
     * @param startSample index of the first sample to write
     * @param numSamples number of samples to write
     */
    void write(float *buffer, int32_t startSample, int32_t numSamples);

private:
    /**
     * Write a 32 bit integer to the stream in Little Endian format.
     */
    void writeIntLittle(int32_t n);

    /**
     * Write a 16 bit integer to the stream in Little Endian format.
     */
    void writeShortLittle(int16_t n);

    /**
     * Write an 'fmt ' chunk to the WAV file containing the given information.
     */
    void writeFormatChunk();

    /**
     * Write a 'data' chunk header to the WAV file. This should be followed by
     * the encoded sample data for the chunk.
     */
    void writeDataChunkHeader();

    /**
     * Write a simple WAV header for PCM data.
     */
    void writeHeader();

    // Write lower 8 bits. Upper bits ignored.
    void writeByte(uint8_t b);

    // Encode one float sample as 24-bit little-endian PCM, with clipping.
    void writePCM24(float value);

    // Encode one float sample as 16-bit little-endian PCM, with clipping.
    void writePCM16(float value);

    /**
     * Write a 'RIFF' file header and a 'WAVE' ID to the WAV file.
     */
    void writeRiffHeader();

    static constexpr int WAVE_FORMAT_PCM = 1;  // 'fmt ' chunk code for uncompressed PCM

    WaveFileOutputStream *mOutputStream = nullptr;
    int32_t mFrameRate = 48000;       // frames (sample groups) per second
    int32_t mSamplesPerFrame = 1;     // channel count
    int32_t bitsPerSample = 16;       // 16 or 24
    int32_t bytesWritten = 0;         // payload bytes emitted so far
    bool headerWritten = false;       // presumably set once the header is emitted — TODO confirm in the .cpp
    static constexpr int32_t PCM24_MIN = -(1 << 23);    // most negative 24-bit sample
    static constexpr int32_t PCM24_MAX = (1 << 23) - 1; // most positive 24-bit sample
};
#endif /* UTIL_WAVE_FILE_WRITER */

Binary file not shown.

After

Width:  |  Height:  |  Size: 31 KiB

View File

@ -0,0 +1,58 @@
package com.mobileer.audio_device;
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import com.mobileer.oboetester.R;
/**
* Provides views for a list of audio devices. Usually used as an Adapter for a Spinner or ListView.
*/
public class AudioDeviceAdapter extends ArrayAdapter<AudioDeviceListEntry> {

    public AudioDeviceAdapter(Context context) {
        super(context, R.layout.audio_devices);
    }

    @NonNull
    @Override
    public View getView(int position, @Nullable View convertView, @NonNull ViewGroup parent) {
        // Spinner rows and drop-down rows use the same layout.
        return getDropDownView(position, convertView, parent);
    }

    @Override
    public View getDropDownView(int position, @Nullable View convertView, @NonNull ViewGroup parent) {
        View view = convertView;
        if (view == null) {
            // No recycled row available; inflate a fresh one.
            view = LayoutInflater.from(parent.getContext())
                    .inflate(R.layout.audio_devices, parent, false);
        }
        AudioDeviceListEntry entry = getItem(position);
        TextView nameView = (TextView) view.findViewById(R.id.device_name);
        nameView.setText(entry.getName());
        return view;
    }
}

View File

@ -0,0 +1,142 @@
package com.mobileer.audio_device;
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.media.AudioDeviceInfo;
public class AudioDeviceInfoConverter {

    /**
     * Converts an {@link AudioDeviceInfo} object into a human readable representation
     *
     * @param adi The AudioDeviceInfo object to be converted to a String
     * @return String containing all the information from the AudioDeviceInfo object
     */
    public static String toString(AudioDeviceInfo adi){
        StringBuilder builder = new StringBuilder();
        builder.append("Id: ").append(adi.getId())
                .append("\nProduct name: ").append(adi.getProductName())
                .append("\nType: ").append(typeToString(adi.getType()))
                .append("\nIs source: ").append(adi.isSource() ? "Yes" : "No")
                .append("\nIs sink: ").append(adi.isSink() ? "Yes" : "No")
                .append("\nChannel counts: ").append(intArrayToString(adi.getChannelCounts()))
                .append("\nChannel masks: ").append(intArrayToString(adi.getChannelMasks()))
                .append("\nChannel index masks: ").append(intArrayToString(adi.getChannelIndexMasks()))
                .append("\nEncodings: ").append(intArrayToString(adi.getEncodings()))
                .append("\nSample Rates: ").append(intArrayToString(adi.getSampleRates()))
                .append("\n");
        return builder.toString();
    }

    /**
     * Converts an integer array into a string where each int is separated by a space
     *
     * @param integerArray the integer array to convert to a string
     * @return string containing all the integer values separated by spaces
     */
    private static String intArrayToString(int[] integerArray){
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < integerArray.length; i++){
            // Separator goes before every element except the first.
            if (i > 0) builder.append(" ");
            builder.append(integerArray[i]);
        }
        return builder.toString();
    }

    /**
     * Converts the value from {@link AudioDeviceInfo#getType()} into a human
     * readable string
     * @param type One of the {@link AudioDeviceInfo}.TYPE_* values
     *             e.g. AudioDeviceInfo.TYPE_BUILT_IN_SPEAKER
     * @return string which describes the type of audio device
     */
    static String typeToString(int type){
        switch (type) {
            case AudioDeviceInfo.TYPE_AUX_LINE:
                return "auxiliary line-level connectors";
            case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
                return "Bluetooth A2DP";
            case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
                return "Bluetooth telephony SCO";
            case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE:
                return "built-in earpiece";
            case AudioDeviceInfo.TYPE_BUILTIN_MIC:
                return "built-in microphone";
            case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
                return "built-in speaker";
            case 0x18: // AudioDeviceInfo.TYPE_BUILTIN_SPEAKER_SAFE:
                return "built-in speaker safe";
            case AudioDeviceInfo.TYPE_BUS:
                return "BUS";
            case AudioDeviceInfo.TYPE_DOCK:
                return "DOCK";
            case AudioDeviceInfo.TYPE_FM:
                return "FM";
            case AudioDeviceInfo.TYPE_FM_TUNER:
                return "FM tuner";
            case AudioDeviceInfo.TYPE_HDMI:
                return "HDMI";
            case AudioDeviceInfo.TYPE_HDMI_ARC:
                return "HDMI audio return channel";
            case AudioDeviceInfo.TYPE_IP:
                return "IP";
            case AudioDeviceInfo.TYPE_LINE_ANALOG:
                return "line analog";
            case AudioDeviceInfo.TYPE_LINE_DIGITAL:
                return "line digital";
            case AudioDeviceInfo.TYPE_TELEPHONY:
                return "telephony";
            case AudioDeviceInfo.TYPE_TV_TUNER:
                return "TV tuner";
            case AudioDeviceInfo.TYPE_USB_ACCESSORY:
                return "USB accessory";
            case AudioDeviceInfo.TYPE_USB_DEVICE:
                return "USB device";
            case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
                return "wired headphones";
            case AudioDeviceInfo.TYPE_WIRED_HEADSET:
                return "wired headset";
            default:
            case AudioDeviceInfo.TYPE_UNKNOWN:
                return "unknown=" + type;
        }
    }
}

View File

@ -0,0 +1,95 @@
package com.mobileer.audio_device;
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.annotation.TargetApi;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import java.util.List;
import java.util.Vector;
/**
* POJO which represents basic information for an audio device.
*
* Example: id: 8, deviceName: "built-in speaker"
*/
public class AudioDeviceListEntry {

    private int mId;
    private String mName;

    public AudioDeviceListEntry(int deviceId, String deviceName){
        mId = deviceId;
        mName = deviceName;
    }

    public int getId() {
        return mId;
    }

    public String getName(){
        return mName;
    }

    public String toString(){
        return mName;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        AudioDeviceListEntry other = (AudioDeviceListEntry) o;
        if (mId != other.mId) return false;
        if (mName == null) return other.mName == null;
        return mName.equals(other.mName);
    }

    @Override
    public int hashCode() {
        // Standard 31-based combination of id and name hash.
        return 31 * mId + (mName != null ? mName.hashCode() : 0);
    }

    /**
     * Create a list of AudioDeviceListEntry objects from a list of AudioDeviceInfo objects.
     *
     * @param devices A list of {@Link AudioDeviceInfo} objects
     * @param directionType Only audio devices with this direction will be included in the list.
     *                      Valid values are GET_DEVICES_ALL, GET_DEVICES_OUTPUTS and
     *                      GET_DEVICES_INPUTS.
     * @return A list of AudioDeviceListEntry objects
     */
    @TargetApi(23)
    static List<AudioDeviceListEntry> createListFrom(AudioDeviceInfo[] devices, int directionType){
        List<AudioDeviceListEntry> result = new Vector<>();
        for (AudioDeviceInfo info : devices) {
            boolean matchesDirection =
                    directionType == AudioManager.GET_DEVICES_ALL ||
                    (directionType == AudioManager.GET_DEVICES_OUTPUTS && info.isSink()) ||
                    (directionType == AudioManager.GET_DEVICES_INPUTS && info.isSource());
            if (matchesDirection) {
                String label = info.getId() + ": " + info.getProductName() + " "
                        + AudioDeviceInfoConverter.typeToString(info.getType());
                result.add(new AudioDeviceListEntry(info.getId(), label));
            }
        }
        return result;
    }
}

View File

@ -0,0 +1,127 @@
package com.mobileer.audio_device;
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Resources.Theme;
import android.media.AudioDeviceCallback;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.util.AttributeSet;
import android.widget.Spinner;
import com.mobileer.oboetester.R;
import java.util.List;
public class AudioDeviceSpinner extends Spinner {

    // Sentinel device id for the synthetic "auto select" entry (always at position 0).
    private static final int AUTO_SELECT_DEVICE_ID = 0;
    private static final String TAG = AudioDeviceSpinner.class.getName();

    // One of AudioManager.GET_DEVICES_INPUTS / GET_DEVICES_OUTPUTS / GET_DEVICES_ALL.
    private int mDirectionType;
    private AudioDeviceAdapter mDeviceAdapter;
    private AudioManager mAudioManager;
    private Context mContext;

    // The constructors below mirror all Spinner constructors so the view can be
    // created from code or inflated from XML; each delegates to setup().
    public AudioDeviceSpinner(Context context){
        super(context);
        setup(context);
    }

    public AudioDeviceSpinner(Context context, int mode){
        super(context, mode);
        setup(context);
    }

    public AudioDeviceSpinner(Context context, AttributeSet attrs){
        super(context, attrs);
        setup(context);
    }

    public AudioDeviceSpinner(Context context, AttributeSet attrs, int defStyleAttr){
        super(context, attrs, defStyleAttr);
        setup(context);
    }

    public AudioDeviceSpinner(Context context, AttributeSet attrs, int defStyleAttr, int mode){
        super(context, attrs, defStyleAttr, mode);
        setup(context);
    }

    public AudioDeviceSpinner(Context context, AttributeSet attrs, int defStyleAttr,
                              int defStyleRes, int mode){
        super(context, attrs, defStyleAttr, defStyleRes, mode);
        setup(context);
    }

    public AudioDeviceSpinner(Context context, AttributeSet attrs, int defStyleAttr,
                              int defStyleRes, int mode, Theme popupTheme){
        super(context, attrs, defStyleAttr, defStyleRes, mode, popupTheme);
        setup(context);
    }

    // Shared constructor body: create the adapter and seed it with the default entry.
    private void setup(Context context){
        mContext = context;
        mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
        mDeviceAdapter = new AudioDeviceAdapter(context);
        setAdapter(mDeviceAdapter);
        // Add a default entry to the list and select it
        mDeviceAdapter.add(new AudioDeviceListEntry(AUTO_SELECT_DEVICE_ID,
                mContext.getString(R.string.auto_select)));
        setSelection(0);
    }

    /**
     * Choose which devices this spinner lists and start listening for
     * device plug/unplug notifications.
     * @param directionType GET_DEVICES_INPUTS, GET_DEVICES_OUTPUTS or GET_DEVICES_ALL
     */
    @TargetApi(23)
    public void setDirectionType(int directionType){
        this.mDirectionType = directionType;
        setupAudioDeviceCallback();
    }

    @TargetApi(23)
    private void setupAudioDeviceCallback(){
        // Note that we will immediately receive a call to onDevicesAdded with the list of
        // devices which are currently connected.
        mAudioManager.registerAudioDeviceCallback(new AudioDeviceCallback() {
            @Override
            public void onAudioDevicesAdded(AudioDeviceInfo[] addedDevices) {
                List<AudioDeviceListEntry> deviceList =
                        AudioDeviceListEntry.createListFrom(addedDevices, mDirectionType);
                if (deviceList.size() > 0){
                    // Prevent duplicate entries caused by b/80138804
                    for (AudioDeviceListEntry entry : deviceList){
                        mDeviceAdapter.remove(entry);
                    }
                    mDeviceAdapter.addAll(deviceList);
                }
            }

            // NOTE(review): missing @Override annotation; this still overrides
            // AudioDeviceCallback.onAudioDevicesRemoved — confirm and add it.
            public void onAudioDevicesRemoved(AudioDeviceInfo[] removedDevices) {
                List<AudioDeviceListEntry> deviceList =
                        AudioDeviceListEntry.createListFrom(removedDevices, mDirectionType);
                for (AudioDeviceListEntry entry : deviceList){
                    mDeviceAdapter.remove(entry);
                }
                // Fall back to the "auto select" entry.
                setSelection(0);
            }
        }, null);
    }
}

View File

@ -0,0 +1,243 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* Store SchedulableEvents in a timestamped buffer.
* Events may be written in any order.
* Events will be read in sorted order.
* Events with the same timestamp will be read in the order they were added.
*
* Only one Thread can write into the buffer.
* And only one Thread can read from the buffer.
*/
public class EventScheduler {
    private static final long NANOS_PER_MILLI = 1000000;

    // Guards mEventBuffer and is the monitor a reader blocks on in waitNextEvent().
    private final Object lock = new Object();
    // Map from timestamp to the FIFO queue of events scheduled at that time.
    private SortedMap<Long, FastEventQueue> mEventBuffer;
    // This does not have to be guarded. It is only set by the writing thread.
    // If the reader sees a null right before being set then that is OK.
    private FastEventQueue mEventPool = null;
    // Cap on the recycling pool so it cannot grow without bound.
    private static final int MAX_POOL_SIZE = 200;

    public EventScheduler() {
        mEventBuffer = new TreeMap<Long, FastEventQueue>();
    }

    // If we keep at least one node in the list then it can be atomic
    // and non-blocking.
    private class FastEventQueue {
        // One thread takes from the beginning of the list.
        volatile SchedulableEvent mFirst;
        // A second thread returns events to the end of the list.
        volatile SchedulableEvent mLast;
        // Monotonic counters; size() is their difference, which stays consistent
        // with one reader and one writer each updating only its own counter.
        volatile long mEventsAdded;
        volatile long mEventsRemoved;

        FastEventQueue(SchedulableEvent event) {
            mFirst = event;
            mLast = mFirst;
            mEventsAdded = 1; // Always created with one event added. Never empty.
            mEventsRemoved = 0; // None removed yet.
        }

        int size() {
            return (int)(mEventsAdded - mEventsRemoved);
        }

        /**
         * Remove and return the first event.
         * Do not call this unless there is more than one event
         * in the list.
         * @return first event in the list
         */
        public SchedulableEvent remove() {
            // Take first event.
            mEventsRemoved++;
            SchedulableEvent event = mFirst;
            mFirst = event.mNext;
            return event;
        }

        /**
         * Append an event at the tail of the list.
         * @param event event to append
         */
        public void add(SchedulableEvent event) {
            event.mNext = null;
            mLast.mNext = event;
            mLast = event;
            mEventsAdded++;
        }
    }

    /**
     * Base class for events that can be stored in the EventScheduler.
     */
    public static class SchedulableEvent {
        private long mTimestamp;
        // Intrusive link used by FastEventQueue; an event can be in only one queue.
        private SchedulableEvent mNext = null;

        /**
         * @param timestamp time at which the event should be delivered
         */
        public SchedulableEvent(long timestamp) {
            mTimestamp = timestamp;
        }

        /**
         * @return timestamp
         */
        public long getTimestamp() {
            return mTimestamp;
        }

        /**
         * The timestamp should not be modified when the event is in the
         * scheduling buffer.
         */
        public void setTimestamp(long timestamp) {
            mTimestamp = timestamp;
        }
    }

    /**
     * Get an event from the pool.
     * Always leave at least one event in the pool.
     * @return event or null
     */
    public SchedulableEvent removeEventfromPool() {
        SchedulableEvent event = null;
        // Leaving one node keeps FastEventQueue.remove() safe to call (see above).
        if (mEventPool != null && (mEventPool.size() > 1)) {
            event = mEventPool.remove();
        }
        return event;
    }

    /**
     * Return events to a pool so they can be reused.
     *
     * @param event event to recycle
     */
    public void addEventToPool(SchedulableEvent event) {
        if (mEventPool == null) {
            mEventPool = new FastEventQueue(event); // add event to pool
            // If we already have enough items in the pool then just
            // drop the event. This prevents unbounded memory leaks.
        } else if (mEventPool.size() < MAX_POOL_SIZE) {
            mEventPool.add(event);
        }
    }

    /**
     * Add an event to the scheduler. Events with the same time will be
     * processed in order.
     *
     * @param event event to schedule
     */
    public void add(SchedulableEvent event) {
        synchronized (lock) {
            FastEventQueue list = mEventBuffer.get(event.getTimestamp());
            if (list == null) {
                // Capture the earliest time before inserting the new queue.
                long lowestTime = mEventBuffer.isEmpty() ? Long.MAX_VALUE
                        : mEventBuffer.firstKey();
                list = new FastEventQueue(event);
                mEventBuffer.put(event.getTimestamp(), list);
                // If the event we added is earlier than the previous earliest
                // event then notify any threads waiting for the next event.
                if (event.getTimestamp() < lowestTime) {
                    lock.notify();
                }
            } else {
                list.add(event);
            }
        }
    }

    // Remove and return the next event scheduled at lowestTime, dropping the
    // per-timestamp queue from the tree when it becomes empty.
    // Caller must synchronize on lock before calling.
    private SchedulableEvent removeNextEventLocked(long lowestTime) {
        SchedulableEvent event;
        FastEventQueue list = mEventBuffer.get(lowestTime);
        // Remove list from tree if this is the last node.
        if ((list.size() == 1)) {
            mEventBuffer.remove(lowestTime);
        }
        event = list.remove();
        return event;
    }

    /**
     * Check to see if any scheduled events are ready to be processed.
     *
     * @param time events scheduled at or before this time are considered ready
     * @return next event or null if none ready
     */
    public SchedulableEvent getNextEvent(long time) {
        SchedulableEvent event = null;
        synchronized (lock) {
            if (!mEventBuffer.isEmpty()) {
                long lowestTime = mEventBuffer.firstKey();
                // Is it time for this list to be processed?
                if (lowestTime <= time) {
                    event = removeNextEventLocked(lowestTime);
                }
            }
        }
        // Log.i(TAG, "getNextEvent: event = " + event);
        return event;
    }

    /**
     * Return the next available event or wait until there is an event ready to
     * be processed. This method assumes that the timestamps are in nanoseconds
     * and that the current time is System.nanoTime().
     *
     * @return event
     * @throws InterruptedException if the waiting thread is interrupted
     */
    public SchedulableEvent waitNextEvent() throws InterruptedException {
        SchedulableEvent event = null;
        while (true) {
            long millisToWait = Integer.MAX_VALUE;
            synchronized (lock) {
                if (!mEventBuffer.isEmpty()) {
                    long now = System.nanoTime();
                    long lowestTime = mEventBuffer.firstKey();
                    // Is it time for the earliest list to be processed?
                    if (lowestTime <= now) {
                        event = removeNextEventLocked(lowestTime);
                        break;
                    } else {
                        // Figure out how long to sleep until next event.
                        long nanosToWait = lowestTime - now;
                        // Add 1 millisecond so we don't wake up before it is
                        // ready.
                        millisToWait = 1 + (nanosToWait / NANOS_PER_MILLI);
                        // Clip 64-bit value to 32-bit max.
                        if (millisToWait > Integer.MAX_VALUE) {
                            millisToWait = Integer.MAX_VALUE;
                        }
                    }
                }
                // Wait releases the lock; an earlier add() will notify us.
                // The cast is safe because millisToWait was clipped above.
                lock.wait((int) millisToWait);
            }
        }
        return event;
    }
}

View File

@ -0,0 +1,102 @@
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
/**
* MIDI related constants and static methods.
* These values are defined in the MIDI Standard 1.0
* available from the MIDI Manufacturers Association.
*/
public class MidiConstants {
    protected final static String TAG = "MidiTools";
    public static final byte STATUS_COMMAND_MASK = (byte) 0xF0;
    public static final byte STATUS_CHANNEL_MASK = (byte) 0x0F;

    // Channel voice messages.
    public static final byte STATUS_NOTE_OFF = (byte) 0x80;
    public static final byte STATUS_NOTE_ON = (byte) 0x90;
    public static final byte STATUS_POLYPHONIC_AFTERTOUCH = (byte) 0xA0;
    public static final byte STATUS_CONTROL_CHANGE = (byte) 0xB0;
    public static final byte STATUS_PROGRAM_CHANGE = (byte) 0xC0;
    public static final byte STATUS_CHANNEL_PRESSURE = (byte) 0xD0;
    public static final byte STATUS_PITCH_BEND = (byte) 0xE0;

    // System Common Messages.
    public static final byte STATUS_SYSTEM_EXCLUSIVE = (byte) 0xF0;
    public static final byte STATUS_MIDI_TIME_CODE = (byte) 0xF1;
    public static final byte STATUS_SONG_POSITION = (byte) 0xF2;
    public static final byte STATUS_SONG_SELECT = (byte) 0xF3;
    public static final byte STATUS_TUNE_REQUEST = (byte) 0xF6;
    public static final byte STATUS_END_SYSEX = (byte) 0xF7;

    // System Real-Time Messages
    public static final byte STATUS_TIMING_CLOCK = (byte) 0xF8;
    public static final byte STATUS_START = (byte) 0xFA;
    public static final byte STATUS_CONTINUE = (byte) 0xFB;
    public static final byte STATUS_STOP = (byte) 0xFC;
    public static final byte STATUS_ACTIVE_SENSING = (byte) 0xFE;
    public static final byte STATUS_RESET = (byte) 0xFF;

    /** Number of bytes in a message nc from 8c to Ec */
    public final static int CHANNEL_BYTE_LENGTHS[] = { 3, 3, 3, 3, 2, 2, 3 };

    /** Number of bytes in a message Fn from F0 to FF */
    public final static int SYSTEM_BYTE_LENGTHS[] = { 1, 2, 3, 2, 1, 1, 1, 1, 1,
            1, 1, 1, 1, 1, 1, 1 };

    /**
     * MIDI messages, except for SysEx, are 1,2 or 3 bytes long.
     * You can tell how long a MIDI message is from the first status byte.
     * Do not call this for SysEx, which has variable length.
     * @param statusByte first byte of the message
     * @return number of bytes in a complete message, zero if data byte passed
     */
    public static int getBytesPerMessage(byte statusByte) {
        // Java bytes are signed, so mask off the sign extension to get 0..255.
        int status = statusByte & 0xFF;
        if (status < 0x80) {
            return 0; // data byte, not a status byte
        }
        if (status >= 0xF0) {
            // System messages use low nibble for size.
            return SYSTEM_BYTE_LENGTHS[status & 0x0F];
        }
        // Channel voice messages use high nibble for size.
        return CHANNEL_BYTE_LENGTHS[(status >> 4) - 8];
    }

    /**
     * @param msg message buffer
     * @param offset index of the first byte to examine
     * @param count number of bytes to examine
     * @return true if the entire message is ActiveSensing commands
     */
    public static boolean isAllActiveSensing(byte[] msg, int offset,
            int count) {
        // Any byte that is not Active Sensing disqualifies the whole span.
        for (int i = 0; i < count; i++) {
            if (msg[offset + i] != STATUS_ACTIVE_SENSING) {
                return false;
            }
        }
        return true;
    }
}

View File

@ -0,0 +1,152 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.media.midi.MidiDeviceInfo;
import android.media.midi.MidiDeviceStatus;
import android.media.midi.MidiManager;
import android.media.midi.MidiManager.DeviceCallback;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import java.util.HashMap;
import java.util.Map;
/**
* Manage a list a of DeviceCallbacks that are called when a MIDI Device is
* plugged in or unplugged.
*
* This class is used to workaround a bug in the M release of the Android MIDI API.
* The MidiManager.unregisterDeviceCallback() method was not working. So if an app
* was rotated, and the Activity destroyed and recreated, the DeviceCallbacks would
* accumulate in the MidiServer. This would result in multiple callbacks whenever a
* device was added. This class allow an app to register and unregister multiple times
* using a local list of callbacks. It registers a single callback, which stays registered
* until the app is dead.
*
* This code checks to see if the N release is being used. N has a fix for the bug.
* For N, the register and unregister calls are passed directly to the MidiManager.
*
* Note that this code is not thread-safe. It should only be called from the UI thread.
*/
public class MidiDeviceMonitor {
    public final static String TAG = "MidiDeviceMonitor";

    // Process-wide singleton, created lazily by getInstance().
    private static MidiDeviceMonitor mInstance;
    private MidiManager mMidiManager;
    // Locally registered callbacks mapped to the Handler each should run on;
    // a null Handler means "invoke directly on the delivering thread".
    private HashMap<DeviceCallback, Handler> mCallbacks = new HashMap<DeviceCallback,Handler>();
    private MyDeviceCallback mMyDeviceCallback;
    // We only need the workaround for versions before N.
    private boolean mUseProxy = Build.VERSION.SDK_INT <= Build.VERSION_CODES.M;

    // Use an inner class so we do not clutter the API of MidiDeviceMonitor
    // with public DeviceCallback methods.
    protected class MyDeviceCallback extends DeviceCallback {
        @Override
        public void onDeviceAdded(final MidiDeviceInfo device) {
            // Call all of the locally registered callbacks, each on its Handler.
            for(Map.Entry<DeviceCallback, Handler> item : mCallbacks.entrySet()) {
                final DeviceCallback callback = item.getKey();
                Handler handler = item.getValue();
                if(handler == null) {
                    callback.onDeviceAdded(device);
                } else {
                    // Defer to the thread associated with the Handler.
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            callback.onDeviceAdded(device);
                        }
                    });
                }
            }
        }

        @Override
        public void onDeviceRemoved(final MidiDeviceInfo device) {
            // Fan out exactly like onDeviceAdded.
            for(Map.Entry<DeviceCallback, Handler> item : mCallbacks.entrySet()) {
                final DeviceCallback callback = item.getKey();
                Handler handler = item.getValue();
                if(handler == null) {
                    callback.onDeviceRemoved(device);
                } else {
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            callback.onDeviceRemoved(device);
                        }
                    });
                }
            }
        }

        @Override
        public void onDeviceStatusChanged(final MidiDeviceStatus status) {
            // Fan out exactly like onDeviceAdded.
            for(Map.Entry<DeviceCallback, Handler> item : mCallbacks.entrySet()) {
                final DeviceCallback callback = item.getKey();
                Handler handler = item.getValue();
                if(handler == null) {
                    callback.onDeviceStatusChanged(status);
                } else {
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            callback.onDeviceStatusChanged(status);
                        }
                    });
                }
            }
        }
    }

    private MidiDeviceMonitor(MidiManager midiManager) {
        mMidiManager = midiManager;
        if (mUseProxy) {
            Log.i(TAG,"Running on M so we need to use the workaround.");
            // Register a single proxy callback that stays registered for the
            // life of the process; it fans out to the local callback list.
            mMyDeviceCallback = new MyDeviceCallback();
            mMidiManager.registerDeviceCallback(mMyDeviceCallback,
                    new Handler(Looper.getMainLooper()));
        }
    }

    /**
     * Get the singleton monitor, creating it on first use.
     * NOTE(review): the MidiManager passed on the first call wins; later
     * arguments are ignored once mInstance exists.
     */
    public synchronized static MidiDeviceMonitor getInstance(MidiManager midiManager) {
        if (mInstance == null) {
            mInstance = new MidiDeviceMonitor(midiManager);
        }
        return mInstance;
    }

    /**
     * Register a callback, optionally associated with a Handler thread.
     * Before N this only updates the local list; the single proxy callback
     * remains registered with the MidiManager.
     */
    public void registerDeviceCallback(DeviceCallback callback, Handler handler) {
        if (mUseProxy) {
            // Keep our own list of callbacks.
            mCallbacks.put(callback, handler);
        } else {
            mMidiManager.registerDeviceCallback(callback, handler);
        }
    }

    /**
     * Unregister a previously registered callback.
     */
    public void unregisterDeviceCallback(DeviceCallback callback) {
        if (mUseProxy) {
            mCallbacks.remove(callback);
        } else {
            // This works on N or later.
            mMidiManager.unregisterDeviceCallback(callback);
        }
    }
}

View File

@ -0,0 +1,95 @@
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.media.midi.MidiReceiver;
import android.media.midi.MidiSender;
import java.io.IOException;
import java.util.concurrent.CopyOnWriteArrayList;
/**
* Utility class for dispatching MIDI data to a list of {@link android.media.midi.MidiReceiver}s.
* This class subclasses {@link android.media.midi.MidiReceiver} and dispatches any data it receives
* to its receiver list. Any receivers that throw an exception upon receiving data will
* be automatically removed from the receiver list, but no IOException will be returned
* from the dispatcher's {@link android.media.midi.MidiReceiver#onReceive} in that case.
*/
public final class MidiDispatcher extends MidiReceiver {

    // CopyOnWriteArrayList lets onSend() iterate safely while receivers are
    // concurrently added, removed, or dropped after a failure.
    private final CopyOnWriteArrayList<MidiReceiver> mReceivers
            = new CopyOnWriteArrayList<MidiReceiver>();

    private final MidiSender mSender = new MidiSender() {
        /**
         * Called to connect a {@link android.media.midi.MidiReceiver} to the sender
         *
         * @param receiver the receiver to connect
         */
        @Override
        public void onConnect(MidiReceiver receiver) {
            mReceivers.add(receiver);
        }

        /**
         * Called to disconnect a {@link android.media.midi.MidiReceiver} from the sender
         *
         * @param receiver the receiver to disconnect
         */
        @Override
        public void onDisconnect(MidiReceiver receiver) {
            mReceivers.remove(receiver);
        }
    };

    /**
     * Returns the number of {@link android.media.midi.MidiReceiver}s this dispatcher contains.
     * @return the number of receivers
     */
    public int getReceiverCount() {
        return mReceivers.size();
    }

    /**
     * Returns a {@link android.media.midi.MidiSender} which is used to add and remove
     * {@link android.media.midi.MidiReceiver}s
     * to the dispatcher's receiver list.
     * @return the dispatcher's MidiSender
     */
    public MidiSender getSender() {
        return mSender;
    }

    /**
     * Forward the message to every registered receiver.
     * A receiver that throws IOException is removed from the list and the
     * exception is NOT propagated to the caller (contrast with flush()).
     */
    @Override
    public void onSend(byte[] msg, int offset, int count, long timestamp) throws IOException {
        for (MidiReceiver receiver : mReceivers) {
            try {
                receiver.send(msg, offset, count, timestamp);
            } catch (IOException e) {
                // if the receiver fails we remove the receiver but do not propagate the exception
                mReceivers.remove(receiver);
            }
        }
    }

    /**
     * Flush every registered receiver.
     * Unlike onSend(), an IOException raised here does propagate to the caller.
     */
    @Override
    public void flush() throws IOException {
        for (MidiReceiver receiver : mReceivers) {
            receiver.flush();
        }
    }
}

View File

@ -0,0 +1,118 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.media.midi.MidiReceiver;
import java.io.IOException;
/**
 * Add MIDI Events to an EventScheduler so they can be delivered at their
 * scheduled timestamps.
 */
public class MidiEventScheduler extends EventScheduler {
    // Maintain a pool of scheduled events to reduce memory allocation.
    // This pool increases performance by about 14%.
    private final static int POOL_EVENT_SIZE = 16;
    // Receiver that feeds incoming MIDI data into this scheduler.
    private MidiReceiver mReceiver = new SchedulingReceiver();

    private class SchedulingReceiver extends MidiReceiver
    {
        /**
         * Store these bytes in the EventScheduler to be delivered at the specified
         * time.
         */
        @Override
        public void onSend(byte[] msg, int offset, int count, long timestamp)
                throws IOException {
            MidiEvent event = createScheduledEvent(msg, offset, count, timestamp);
            if (event != null) {
                // add() is inherited from EventScheduler (not visible here);
                // presumably it enqueues the event for timed delivery.
                add(event);
            }
        }
    }

    /** A schedulable event carrying a copy of one MIDI message. */
    public static class MidiEvent extends SchedulableEvent {
        // Number of valid bytes in data (data.length may be larger for pooled events).
        public int count = 0;
        public byte[] data;

        // Create an empty event with a buffer of the given capacity (used for the pool).
        private MidiEvent(int count) {
            super(0);
            data = new byte[count];
        }

        // Create an event holding a copy of count bytes of msg starting at offset.
        private MidiEvent(byte[] msg, int offset, int count, long timestamp) {
            super(timestamp);
            data = new byte[count];
            System.arraycopy(msg, offset, data, 0, count);
            this.count = count;
        }

        @Override
        public String toString() {
            String text = "Event: ";
            for (int i = 0; i < count; i++) {
                text += data[i] + ", ";
            }
            return text;
        }
    }

    /**
     * Create an event that contains the message.
     * Messages small enough for a pooled buffer reuse a recycled event;
     * larger messages get a freshly allocated, exact-size event.
     */
    private MidiEvent createScheduledEvent(byte[] msg, int offset, int count,
            long timestamp) {
        MidiEvent event;
        if (count > POOL_EVENT_SIZE) {
            event = new MidiEvent(msg, offset, count, timestamp);
        } else {
            event = (MidiEvent) removeEventfromPool();
            if (event == null) {
                event = new MidiEvent(POOL_EVENT_SIZE);
            }
            System.arraycopy(msg, offset, event.data, 0, count);
            event.count = count;
            event.setTimestamp(timestamp);
        }
        return event;
    }

    /**
     * Return events to a pool so they can be reused.
     *
     * @param event the event to recycle; ignored unless it is a pool-sized MidiEvent
     */
    @Override
    public void addEventToPool(SchedulableEvent event) {
        // Make sure the event is suitable for the pool.
        if (event instanceof MidiEvent) {
            MidiEvent midiEvent = (MidiEvent) event;
            if (midiEvent.data.length == POOL_EVENT_SIZE) {
                super.addEventToPool(event);
            }
        }
    }

    /**
     * This MidiReceiver will write data to the scheduling buffer.
     * @return the MidiReceiver
     */
    public MidiReceiver getReceiver() {
        return mReceiver;
    }
}

View File

@ -0,0 +1,89 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.media.midi.MidiSender;
import android.util.Log;
import java.io.IOException;
/**
 * A MidiEventScheduler that delivers scheduled events from its own thread,
 * dispatching them to receivers registered through {@link #getSender()}.
 */
public class MidiEventThread extends MidiEventScheduler {
    private EventThread mEventThread;
    MidiDispatcher mDispatcher = new MidiDispatcher();

    class EventThread extends Thread {
        // volatile: written by requestStop() on another thread and read by the
        // run() loop; without it the stop request might never become visible
        // and the thread could spin forever.
        private volatile boolean go = true;

        @Override
        public void run() {
            while (go) {
                try {
                    // Blocks until the next event's timestamp is due.
                    MidiEvent event = (MidiEvent) waitNextEvent();
                    try {
                        Log.i(MidiConstants.TAG, "Fire event " + event.data[0] + " at "
                                + event.getTimestamp());
                        mDispatcher.send(event.data, 0,
                                event.count, event.getTimestamp());
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    // Put event back in the pool for future use.
                    addEventToPool(event);
                } catch (InterruptedException e) {
                    // OK, this is how we stop the thread.
                }
            }
        }

        /**
         * Asynchronously tell the thread to stop.
         */
        public void requestStop() {
            go = false;
            interrupt();
        }
    }

    /** Start the delivery thread, first stopping any previous one. */
    public void start() {
        stop();
        mEventThread = new EventThread();
        mEventThread.start();
    }

    /**
     * Asks the thread to stop then waits for it to stop.
     */
    public void stop() {
        if (mEventThread != null) {
            mEventThread.requestStop();
            try {
                // Bounded join so a stuck thread cannot hang the caller forever.
                mEventThread.join(500);
            } catch (InterruptedException e) {
                Log.e(MidiConstants.TAG,
                        "Interrupted while waiting for MIDI EventScheduler thread to stop.");
            } finally {
                mEventThread = null;
            }
        }
    }

    /** @return a MidiSender used to connect receivers that get the fired events */
    public MidiSender getSender() {
        return mDispatcher.getSender();
    }
}

View File

@ -0,0 +1,111 @@
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.media.midi.MidiReceiver;
import java.io.IOException;
/**
 * Convert stream of arbitrary MIDI bytes into discrete messages.
 *
 * Parses the incoming bytes and then posts individual messages to the receiver
 * specified in the constructor. Short messages of 1-3 bytes will be complete.
 * System Exclusive messages may be posted in pieces.
 *
 * Resolves Running Status and interleaved System Real-Time messages.
 */
public class MidiFramer extends MidiReceiver {
    private MidiReceiver mReceiver;       // destination for framed messages
    private byte[] mBuffer = new byte[3]; // assembly buffer for short (1-3 byte) messages
    private int mCount;                   // number of bytes currently in mBuffer
    private byte mRunningStatus;          // active Running Status byte, 0 if none
    private int mNeeded;                  // data bytes still required to complete the message
    private boolean mInSysEx;             // true while inside a System Exclusive message

    public MidiFramer(MidiReceiver receiver) {
        mReceiver = receiver;
    }

    /*
     * @see android.midi.MidiReceiver#onSend(byte[], int, int, long)
     */
    @Override
    public void onSend(byte[] data, int offset, int count, long timestamp)
            throws IOException {
        // If we are resuming inside a SysEx, the pending SysEx span starts here.
        int sysExStartOffset = (mInSysEx ? offset : -1);
        for (int i = 0; i < count; i++) {
            final byte currentByte = data[offset];
            final int currentInt = currentByte & 0xFF;
            if (currentInt >= 0x80) { // status byte?
                if (currentInt < 0xF0) { // channel message?
                    // NOTE(review): mInSysEx is not cleared here; per the MIDI spec a
                    // channel status byte implicitly terminates SysEx — confirm intended.
                    mRunningStatus = currentByte;
                    mCount = 1;
                    mNeeded = MidiConstants.getBytesPerMessage(currentByte) - 1;
                } else if (currentInt < 0xF8) { // system common?
                    if (currentInt == 0xF0 /* SysEx Start */) {
                        // Log.i(TAG, "SysEx Start");
                        mInSysEx = true;
                        sysExStartOffset = offset;
                    } else if (currentInt == 0xF7 /* SysEx End */) {
                        // Log.i(TAG, "SysEx End");
                        if (mInSysEx) {
                            // Post the SysEx chunk including the terminating 0xF7.
                            mReceiver.send(data, sysExStartOffset,
                                    offset - sysExStartOffset + 1, timestamp);
                            mInSysEx = false;
                            sysExStartOffset = -1;
                        }
                    } else {
                        // Other system common message; cancels Running Status.
                        mBuffer[0] = currentByte;
                        mRunningStatus = 0;
                        mCount = 1;
                        mNeeded = MidiConstants.getBytesPerMessage(currentByte) - 1;
                    }
                } else { // real-time?
                    // Single byte message interleaved with other data.
                    if (mInSysEx) {
                        // Flush SysEx bytes seen so far, then resume after this byte.
                        mReceiver.send(data, sysExStartOffset,
                                offset - sysExStartOffset, timestamp);
                        sysExStartOffset = offset + 1;
                    }
                    mReceiver.send(data, offset, 1, timestamp);
                }
            } else { // data byte
                if (!mInSysEx) {
                    mBuffer[mCount++] = currentByte;
                    if (--mNeeded == 0) {
                        if (mRunningStatus != 0) {
                            // Reinstate the status byte implied by Running Status.
                            mBuffer[0] = mRunningStatus;
                        }
                        mReceiver.send(mBuffer, 0, mCount, timestamp);
                        // Prepare for another message with the same status.
                        mNeeded = MidiConstants.getBytesPerMessage(mBuffer[0]) - 1;
                        mCount = 1;
                    }
                }
            }
            ++offset;
        }
        // send any accumulated SysEx data
        if (sysExStartOffset >= 0 && sysExStartOffset < offset) {
            mReceiver.send(data, sysExStartOffset,
                    offset - sysExStartOffset, timestamp);
        }
    }
}

View File

@ -0,0 +1,92 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.app.Activity;
import android.media.midi.MidiDevice;
import android.media.midi.MidiDeviceInfo;
import android.media.midi.MidiInputPort;
import android.media.midi.MidiManager;
import android.media.midi.MidiReceiver;
import android.util.Log;
import java.io.IOException;
/**
 * Manages a Spinner for selecting a MidiInputPort.
 */
public class MidiInputPortSelector extends MidiPortSelector {
    private MidiInputPort mInputPort;
    private MidiDevice mOpenDevice;

    /**
     * @param midiManager manager used to open devices
     * @param activity activity that hosts the Spinner
     * @param spinnerId ID from the layout resource
     */
    public MidiInputPortSelector(MidiManager midiManager, Activity activity,
            int spinnerId) {
        super(midiManager, activity, spinnerId, MidiDeviceInfo.PortInfo.TYPE_INPUT);
    }

    @Override
    public void onPortSelected(final MidiPortWrapper wrapper) {
        close();
        // The base class passes null from onNothingSelected(); without this
        // guard, wrapper.getDeviceInfo() would throw a NullPointerException.
        if (wrapper == null) {
            return;
        }
        final MidiDeviceInfo info = wrapper.getDeviceInfo();
        if (info != null) {
            mMidiManager.openDevice(info, new MidiManager.OnDeviceOpenedListener() {
                @Override
                public void onDeviceOpened(MidiDevice device) {
                    if (device == null) {
                        Log.e(MidiConstants.TAG, "could not open " + info);
                    } else {
                        mOpenDevice = device;
                        mInputPort = mOpenDevice.openInputPort(
                                wrapper.getPortIndex());
                        if (mInputPort == null) {
                            Log.e(MidiConstants.TAG, "could not open input port on " + info);
                        }
                    }
                }
            }, null);
            // Don't run the callback on the UI thread because openInputPort might take a while.
        }
    }

    /** @return the open input port, or null if none is open yet */
    public MidiReceiver getReceiver() {
        return mInputPort;
    }

    @Override
    public void onClose() {
        try {
            if (mInputPort != null) {
                Log.i(MidiConstants.TAG, "MidiInputPortSelector.onClose() - close port");
                mInputPort.close();
            }
            mInputPort = null;
            if (mOpenDevice != null) {
                mOpenDevice.close();
            }
            mOpenDevice = null;
        } catch (IOException e) {
            Log.e(MidiConstants.TAG, "cleanup failed", e);
        }
        super.onClose();
    }
}

View File

@ -0,0 +1,91 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.app.Activity;
import android.media.midi.MidiDeviceInfo;
import android.media.midi.MidiManager;
import android.util.Log;
import java.io.IOException;
/**
 * Select an output port and connect it to a destination input port.
 */
public class MidiOutputPortConnectionSelector extends MidiPortSelector {
    public final static String TAG = "MidiOutputPortConnectionSelector";
    private MidiPortConnector mSynthConnector;
    private MidiDeviceInfo mDestinationDeviceInfo;
    private int mDestinationPortIndex;
    private MidiPortWrapper mLastWrapper;
    private MidiPortConnector.OnPortsConnectedListener mConnectedListener;

    /**
     * Create a selector for connecting to the destination input port.
     *
     * @param midiManager manager used to open devices
     * @param activity activity that hosts the Spinner
     * @param spinnerId ID from the layout resource
     * @param destinationDeviceInfo device that owns the destination input port
     * @param destinationPortIndex index of the destination input port
     */
    public MidiOutputPortConnectionSelector(MidiManager midiManager,
            Activity activity, int spinnerId,
            MidiDeviceInfo destinationDeviceInfo, int destinationPortIndex) {
        super(midiManager, activity, spinnerId,
                MidiDeviceInfo.PortInfo.TYPE_OUTPUT);
        mDestinationDeviceInfo = destinationDeviceInfo;
        mDestinationPortIndex = destinationPortIndex;
    }

    @Override
    public void onPortSelected(final MidiPortWrapper wrapper) {
        // The base class passes null from onNothingSelected(); the previous
        // code called wrapper.equals() and crashed with a NullPointerException.
        if (wrapper == null) {
            // Selection cleared: tear down any existing connection.
            onClose();
        } else if (!wrapper.equals(mLastWrapper)) {
            onClose();
            if (wrapper.getDeviceInfo() != null) {
                mSynthConnector = new MidiPortConnector(mMidiManager);
                mSynthConnector.connectToDevicePort(wrapper.getDeviceInfo(),
                        wrapper.getPortIndex(), mDestinationDeviceInfo,
                        mDestinationPortIndex,
                        // not safe on UI thread
                        mConnectedListener, null);
            }
        }
        mLastWrapper = wrapper;
    }

    @Override
    public void onClose() {
        try {
            if (mSynthConnector != null) {
                mSynthConnector.close();
                mSynthConnector = null;
            }
        } catch (IOException e) {
            Log.e(MidiConstants.TAG, "Exception in closeSynthResources()", e);
        }
        super.onClose();
    }

    /**
     * @param connectedListener listener notified when the ports are connected
     */
    public void setConnectedListener(
            MidiPortConnector.OnPortsConnectedListener connectedListener) {
        mConnectedListener = connectedListener;
    }
}

View File

@ -0,0 +1,102 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.app.Activity;
import android.media.midi.MidiDevice;
import android.media.midi.MidiDeviceInfo;
import android.media.midi.MidiManager;
import android.media.midi.MidiOutputPort;
import android.media.midi.MidiSender;
import android.util.Log;
import java.io.IOException;
/**
 * Manages a Spinner for selecting a MidiOutputPort.
 */
public class MidiOutputPortSelector extends MidiPortSelector {
    public final static String TAG = "MidiOutputPortSelector";
    private MidiOutputPort mOutputPort;
    private MidiDispatcher mDispatcher = new MidiDispatcher();
    private MidiDevice mOpenDevice;

    /**
     * @param midiManager manager used to open devices
     * @param activity activity that hosts the Spinner
     * @param spinnerId ID from the layout resource
     */
    public MidiOutputPortSelector(MidiManager midiManager, Activity activity,
            int spinnerId) {
        super(midiManager, activity, spinnerId, MidiDeviceInfo.PortInfo.TYPE_OUTPUT);
    }

    @Override
    public void onPortSelected(final MidiPortWrapper wrapper) {
        close();
        // The base class passes null from onNothingSelected(); without this
        // guard, wrapper.getDeviceInfo() would throw a NullPointerException.
        if (wrapper == null) {
            return;
        }
        final MidiDeviceInfo info = wrapper.getDeviceInfo();
        if (info != null) {
            mMidiManager.openDevice(info, new MidiManager.OnDeviceOpenedListener() {
                @Override
                public void onDeviceOpened(MidiDevice device) {
                    if (device == null) {
                        Log.e(MidiConstants.TAG, "could not open " + info);
                    } else {
                        mOpenDevice = device;
                        mOutputPort = device.openOutputPort(wrapper.getPortIndex());
                        if (mOutputPort == null) {
                            Log.e(MidiConstants.TAG,
                                    "could not open output port for " + info);
                            return;
                        }
                        // Route everything the port produces through our dispatcher.
                        mOutputPort.connect(mDispatcher);
                    }
                }
            }, null);
            // Don't run the callback on the UI thread because openOutputPort might take a while.
        }
    }

    @Override
    public void onClose() {
        try {
            if (mOutputPort != null) {
                mOutputPort.disconnect(mDispatcher);
            }
            mOutputPort = null;
            if (mOpenDevice != null) {
                mOpenDevice.close();
            }
            mOpenDevice = null;
        } catch (IOException e) {
            Log.e(MidiConstants.TAG, "cleanup failed", e);
        }
        super.onClose();
    }

    /**
     * You can connect your MidiReceivers to this sender. The user will then select which output
     * port will send messages through this MidiSender.
     * @return a MidiSender that will send the messages from the selected port.
     */
    public MidiSender getSender() {
        return mDispatcher.getSender();
    }
}

View File

@ -0,0 +1,203 @@
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.media.midi.MidiDevice;
import android.media.midi.MidiDevice.MidiConnection;
import android.media.midi.MidiDeviceInfo;
import android.media.midi.MidiInputPort;
import android.media.midi.MidiManager;
import android.os.Handler;
import android.util.Log;
import java.io.IOException;
/**
 * Tool for connecting MIDI ports on two remote devices.
 *
 * Opens the destination device, opens its input port, then opens the source
 * device and connects its output port to that input port. All steps run
 * asynchronously via MidiManager callbacks on the supplied Handler.
 */
public class MidiPortConnector {
    private final MidiManager mMidiManager;
    private MidiDevice mSourceDevice;        // opened source device, owned until close()
    private MidiDevice mDestinationDevice;   // opened destination device, owned until close()
    private MidiConnection mConnection;      // the active port-to-port connection, if any

    /**
     * @param midiManager manager used to open the devices
     */
    public MidiPortConnector(MidiManager midiManager) {
        mMidiManager = midiManager;
    }

    /** Close the connection and both devices. Safe to call when nothing is open. */
    public void close() throws IOException {
        if (mConnection != null) {
            Log.i(MidiConstants.TAG,
                    "MidiPortConnector closing connection " + mConnection);
            mConnection.close();
            mConnection = null;
        }
        if (mSourceDevice != null) {
            mSourceDevice.close();
            mSourceDevice = null;
        }
        if (mDestinationDevice != null) {
            mDestinationDevice.close();
            mDestinationDevice = null;
        }
    }

    // close() that logs instead of throwing; used inside callbacks where an
    // IOException cannot be propagated to the caller.
    private void safeClose() {
        try {
            close();
        } catch (IOException e) {
            Log.e(MidiConstants.TAG, "could not close resources", e);
        }
    }

    /**
     * Listener class used for receiving the results of
     * {@link #connectToDevicePort}
     */
    public interface OnPortsConnectedListener {
        /**
         * Called to respond to a {@link #connectToDevicePort} request
         *
         * @param connection
         *            a {@link MidiConnection} that represents the connected
         *            ports, or null if connection failed
         */
        abstract public void onPortsConnected(MidiConnection connection);
    }

    /**
     * Open two devices and connect their ports.
     *
     * @param sourceDeviceInfo device that owns the source output port
     * @param sourcePortIndex index of the source output port
     * @param destinationDeviceInfo device that owns the destination input port
     * @param destinationPortIndex index of the destination input port
     */
    public void connectToDevicePort(final MidiDeviceInfo sourceDeviceInfo,
            final int sourcePortIndex,
            final MidiDeviceInfo destinationDeviceInfo,
            final int destinationPortIndex) {
        // Convenience overload: no completion listener, default handler.
        connectToDevicePort(sourceDeviceInfo, sourcePortIndex,
                destinationDeviceInfo, destinationPortIndex, null, null);
    }

    /**
     * Open two devices and connect their ports.
     * Then notify listener of the result.
     *
     * @param sourceDeviceInfo device that owns the source output port
     * @param sourcePortIndex index of the source output port
     * @param destinationDeviceInfo device that owns the destination input port
     * @param destinationPortIndex index of the destination input port
     * @param listener notified with the MidiConnection, or null on failure
     * @param handler Handler the callbacks run on; null for a default thread
     */
    public void connectToDevicePort(final MidiDeviceInfo sourceDeviceInfo,
            final int sourcePortIndex,
            final MidiDeviceInfo destinationDeviceInfo,
            final int destinationPortIndex,
            final OnPortsConnectedListener listener, final Handler handler) {
        // Release anything left over from a previous connection attempt.
        safeClose();
        mMidiManager.openDevice(destinationDeviceInfo,
                new MidiManager.OnDeviceOpenedListener() {
                    @Override
                    public void onDeviceOpened(MidiDevice destinationDevice) {
                        if (destinationDevice == null) {
                            Log.e(MidiConstants.TAG,
                                    "could not open " + destinationDeviceInfo);
                            if (listener != null) {
                                listener.onPortsConnected(null);
                            }
                        } else {
                            mDestinationDevice = destinationDevice;
                            Log.i(MidiConstants.TAG,
                                    "connectToDevicePort opened "
                                            + destinationDeviceInfo);
                            // Destination device was opened so go to next step.
                            MidiInputPort destinationInputPort = destinationDevice
                                    .openInputPort(destinationPortIndex);
                            if (destinationInputPort != null) {
                                Log.i(MidiConstants.TAG,
                                        "connectToDevicePort opened port on "
                                                + destinationDeviceInfo);
                                connectToDevicePort(sourceDeviceInfo,
                                        sourcePortIndex,
                                        destinationInputPort,
                                        listener, handler);
                            } else {
                                Log.e(MidiConstants.TAG,
                                        "could not open port on "
                                                + destinationDeviceInfo);
                                safeClose();
                                if (listener != null) {
                                    listener.onPortsConnected(null);
                                }
                            }
                        }
                    }
                }, handler);
    }

    /**
     * Open a source device and connect its output port to the
     * destinationInputPort.
     *
     * @param sourceDeviceInfo device that owns the source output port
     * @param sourcePortIndex index of the source output port
     * @param destinationInputPort already-opened input port to connect to
     */
    private void connectToDevicePort(final MidiDeviceInfo sourceDeviceInfo,
            final int sourcePortIndex,
            final MidiInputPort destinationInputPort,
            final OnPortsConnectedListener listener, final Handler handler) {
        mMidiManager.openDevice(sourceDeviceInfo,
                new MidiManager.OnDeviceOpenedListener() {
                    @Override
                    public void onDeviceOpened(MidiDevice device) {
                        if (device == null) {
                            Log.e(MidiConstants.TAG,
                                    "could not open " + sourceDeviceInfo);
                            safeClose();
                            if (listener != null) {
                                listener.onPortsConnected(null);
                            }
                        } else {
                            Log.i(MidiConstants.TAG,
                                    "connectToDevicePort opened "
                                            + sourceDeviceInfo);
                            // Device was opened so connect the ports.
                            mSourceDevice = device;
                            mConnection = device.connectPorts(
                                    destinationInputPort, sourcePortIndex);
                            if (mConnection == null) {
                                Log.e(MidiConstants.TAG, "could not connect to "
                                        + sourceDeviceInfo);
                                safeClose();
                            }
                            if (listener != null) {
                                listener.onPortsConnected(mConnection);
                            }
                        }
                    }
                }, handler);
    }
}

View File

@ -0,0 +1,191 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.app.Activity;
import android.media.midi.MidiDeviceInfo;
import android.media.midi.MidiDeviceStatus;
import android.media.midi.MidiManager;
import android.media.midi.MidiManager.DeviceCallback;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
import java.util.HashSet;
/**
 * Base class that uses a Spinner to select available MIDI ports.
 *
 * Populates the Spinner with one entry per port of the requested type on every
 * attached device, tracks device hot-plug through a device callback, and hides
 * input ports that are reported busy. Subclasses implement
 * {@link #onPortSelected} to react to the user's choice; note it can be called
 * with null when the selection is cleared.
 */
public abstract class MidiPortSelector extends DeviceCallback {
    // Which kind of port this selector lists: TYPE_INPUT or TYPE_OUTPUT.
    private int mType = MidiDeviceInfo.PortInfo.TYPE_INPUT;
    protected ArrayAdapter<MidiPortWrapper> mAdapter;
    // Input ports currently reported busy and therefore hidden from the menu.
    protected HashSet<MidiPortWrapper> mBusyPorts = new HashSet<MidiPortWrapper>();
    private Spinner mSpinner;
    protected MidiManager mMidiManager;
    protected Activity mActivity;
    // The wrapper the user currently has selected, or null.
    private MidiPortWrapper mCurrentWrapper;

    /**
     * @param midiManager
     * @param activity
     * @param spinnerId
     *            ID from the layout resource
     * @param type
     *            TYPE_INPUT or TYPE_OUTPUT
     */
    public MidiPortSelector(MidiManager midiManager, Activity activity,
            int spinnerId, int type) {
        mMidiManager = midiManager;
        mActivity = activity;
        mType = type;
        mAdapter = new ArrayAdapter<MidiPortWrapper>(activity,
                android.R.layout.simple_spinner_item);
        mAdapter.setDropDownViewResource(
                android.R.layout.simple_spinner_dropdown_item);
        // First entry is a null-device placeholder meaning "no port selected".
        mAdapter.add(new MidiPortWrapper(null, 0, 0));
        mSpinner = (Spinner) activity.findViewById(spinnerId);
        mSpinner.setOnItemSelectedListener(
                new AdapterView.OnItemSelectedListener() {
                    public void onItemSelected(AdapterView<?> parent, View view,
                            int pos, long id) {
                        mCurrentWrapper = mAdapter.getItem(pos);
                        onPortSelected(mCurrentWrapper);
                    }

                    public void onNothingSelected(AdapterView<?> parent) {
                        // Subclasses must tolerate a null wrapper here.
                        onPortSelected(null);
                        mCurrentWrapper = null;
                    }
                });
        mSpinner.setAdapter(mAdapter);
        // Listen for device hot-plug on the main thread.
        MidiDeviceMonitor.getInstance(mMidiManager).registerDeviceCallback(this,
                new Handler(Looper.getMainLooper()));
        // Seed the menu with the devices already attached.
        MidiDeviceInfo[] infos = mMidiManager.getDevices();
        for (MidiDeviceInfo info : infos) {
            onDeviceAdded(info);
        }
    }

    /**
     * Set to no port selected.
     */
    public void clearSelection() {
        mSpinner.setSelection(0);
    }

    // Number of ports of our type (input or output) on the given device.
    private int getInfoPortCount(final MidiDeviceInfo info) {
        int portCount = (mType == MidiDeviceInfo.PortInfo.TYPE_INPUT)
                ? info.getInputPortCount() : info.getOutputPortCount();
        return portCount;
    }

    @Override
    public void onDeviceAdded(final MidiDeviceInfo info) {
        // Add one menu entry per matching port on the new device.
        int portCount = getInfoPortCount(info);
        for (int i = 0; i < portCount; ++i) {
            MidiPortWrapper wrapper = new MidiPortWrapper(info, mType, i);
            mAdapter.add(wrapper);
            Log.i(MidiConstants.TAG, wrapper + " was added to " + this);
            mAdapter.notifyDataSetChanged();
        }
    }

    @Override
    public void onDeviceRemoved(final MidiDeviceInfo info) {
        int portCount = getInfoPortCount(info);
        for (int i = 0; i < portCount; ++i) {
            MidiPortWrapper wrapper = new MidiPortWrapper(info, mType, i);
            MidiPortWrapper currentWrapper = mCurrentWrapper;
            mAdapter.remove(wrapper);
            // If the currently selected port was removed then select no port.
            if (wrapper.equals(currentWrapper)) {
                clearSelection();
            }
            mAdapter.notifyDataSetChanged();
            Log.i(MidiConstants.TAG, wrapper + " was removed");
        }
    }

    @Override
    public void onDeviceStatusChanged(final MidiDeviceStatus status) {
        // If an input port becomes busy then remove it from the menu.
        // If it becomes free then add it back to the menu.
        if (mType == MidiDeviceInfo.PortInfo.TYPE_INPUT) {
            MidiDeviceInfo info = status.getDeviceInfo();
            Log.i(MidiConstants.TAG, "MidiPortSelector.onDeviceStatusChanged status = " + status
                    + ", mType = " + mType
                    + ", activity = " + mActivity.getPackageName()
                    + ", info = " + info);
            // Look for transitions from free to busy.
            int portCount = info.getInputPortCount();
            for (int i = 0; i < portCount; ++i) {
                MidiPortWrapper wrapper = new MidiPortWrapper(info, mType, i);
                // Ignore the port we ourselves have open.
                if (!wrapper.equals(mCurrentWrapper)) {
                    if (status.isInputPortOpen(i)) { // busy?
                        if (!mBusyPorts.contains(wrapper)) {
                            // was free, now busy
                            mBusyPorts.add(wrapper);
                            mAdapter.remove(wrapper);
                            mAdapter.notifyDataSetChanged();
                        }
                    } else {
                        if (mBusyPorts.remove(wrapper)) {
                            // was busy, now free
                            mAdapter.add(wrapper);
                            mAdapter.notifyDataSetChanged();
                        }
                    }
                }
            }
        }
    }

    /**
     * Implement this method to handle the user selecting a port on a device.
     *
     * @param wrapper the selected port, or null if the selection was cleared
     */
    public abstract void onPortSelected(MidiPortWrapper wrapper);

    /**
     * Implement this method to clean up any open resources.
     */
    public void onClose() {
    }

    /**
     * Implement this method to clean up any open resources.
     */
    public void onDestroy() {
        MidiDeviceMonitor.getInstance(mMidiManager).unregisterDeviceCallback(this);
    }

    /**
     * Close any open resources via {@link #onClose()}.
     */
    public void close() {
        onClose();
    }
}

View File

@ -0,0 +1,123 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.media.midi.MidiDeviceInfo;
import android.media.midi.MidiDeviceInfo.PortInfo;
/**
 * Wrapper for a MIDI device and port description, suitable for display in a
 * Spinner through {@link #toString()}. A wrapper built with a null device info
 * represents the "no port selected" entry.
 */
public class MidiPortWrapper {
    private MidiDeviceInfo mInfo;   // may be null for the "no port selected" entry
    private int mPortIndex;
    private int mType;
    private String mString;         // lazily-built display string

    /**
     * Wrapper for a MIDI device and port description.
     * @param info device description, or null for "no port selected"
     * @param portType PortInfo.TYPE_INPUT or PortInfo.TYPE_OUTPUT
     * @param portIndex index of the port on the device
     */
    public MidiPortWrapper(MidiDeviceInfo info, int portType, int portIndex) {
        mInfo = info;
        mType = portType;
        mPortIndex = portIndex;
    }

    // Build the display string from device and port properties.
    private void updateString() {
        if (mInfo == null) {
            mString = "- - - - - -";
        } else {
            StringBuilder sb = new StringBuilder();
            String name = mInfo.getProperties()
                    .getString(MidiDeviceInfo.PROPERTY_NAME);
            if (name == null) {
                // Fall back to "manufacturer, product" when no name is set.
                name = mInfo.getProperties()
                        .getString(MidiDeviceInfo.PROPERTY_MANUFACTURER) + ", "
                        + mInfo.getProperties()
                                .getString(MidiDeviceInfo.PROPERTY_PRODUCT);
            }
            sb.append("#" + mInfo.getId());
            sb.append(", ").append(name);
            PortInfo portInfo = findPortInfo();
            sb.append("[" + mPortIndex + "]");
            if (portInfo != null) {
                sb.append(", ").append(portInfo.getName());
            } else {
                sb.append(", null");
            }
            mString = sb.toString();
        }
    }

    /**
     * @return the PortInfo matching this wrapper's port index and type,
     *         or null if the device has no such port
     */
    private PortInfo findPortInfo() {
        PortInfo[] ports = mInfo.getPorts();
        for (PortInfo portInfo : ports) {
            if (portInfo.getPortNumber() == mPortIndex
                    && portInfo.getType() == mType) {
                return portInfo;
            }
        }
        return null;
    }

    public int getPortIndex() {
        return mPortIndex;
    }

    public MidiDeviceInfo getDeviceInfo() {
        return mInfo;
    }

    @Override
    public String toString() {
        if (mString == null) {
            updateString();
        }
        return mString;
    }

    @Override
    public boolean equals(Object other) {
        if (other == null)
            return false;
        if (!(other instanceof MidiPortWrapper))
            return false;
        MidiPortWrapper otherWrapper = (MidiPortWrapper) other;
        if (mPortIndex != otherWrapper.mPortIndex)
            return false;
        if (mType != otherWrapper.mType)
            return false;
        if (mInfo == null)
            return (otherWrapper.mInfo == null);
        return mInfo.equals(otherWrapper.mInfo);
    }

    @Override
    public int hashCode() {
        int hashCode = 1;
        hashCode = 31 * hashCode + mPortIndex;
        hashCode = 31 * hashCode + mType;
        // mInfo may legitimately be null (the "no port selected" wrapper is
        // constructed with null info); equals() handles that case, so hashCode
        // must not throw a NullPointerException for it.
        hashCode = 31 * hashCode + ((mInfo == null) ? 0 : mInfo.hashCode());
        return hashCode;
    }
}

View File

@ -0,0 +1,45 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.media.midi.MidiDeviceInfo;
import android.media.midi.MidiManager;
/**
 * Miscellaneous tools for Android MIDI.
 */
public class MidiTools {
    /**
     * Search the attached MIDI devices for one whose manufacturer and product
     * properties match the given strings.
     *
     * @return a device that matches the manufacturer and product or null
     */
    public static MidiDeviceInfo findDevice(MidiManager midiManager,
            String manufacturer, String product) {
        for (MidiDeviceInfo candidate : midiManager.getDevices()) {
            String candidateManufacturer = candidate.getProperties()
                    .getString(MidiDeviceInfo.PROPERTY_MANUFACTURER);
            // Guard clauses: skip candidates that cannot match.
            if (manufacturer == null || !manufacturer.equals(candidateManufacturer)) {
                continue;
            }
            String candidateProduct = candidate.getProperties()
                    .getString(MidiDeviceInfo.PROPERTY_PRODUCT);
            if (product != null && product.equals(candidateProduct)) {
                return candidate;
            }
        }
        return null;
    }
}

View File

@ -0,0 +1,384 @@
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import java.util.ArrayList;
import java.util.HashMap;
/**
* View that displays a traditional piano style keyboard. Finger presses are reported to a
* MusicKeyListener. Keys that pressed are highlighted. Running a finger along the top of the
* keyboard will only hit black keys. Running a finger along the bottom of the keyboard will only
* hit white keys.
*/
public class MusicKeyboardView extends View {
    // Adjust proportions of the keys.
    // Horizontal gap, in pixels, left between adjacent white keys when drawing.
    private static final int WHITE_KEY_GAP = 10;
    private static final int PITCH_MIDDLE_C = 60;
    private static final int NOTES_PER_OCTAVE = 12;
    // Semitone offsets of the seven white keys (C D E F G A B) within an octave.
    private static final int[] WHITE_KEY_OFFSETS = {
            0, 2, 4, 5, 7, 9, 11
    };
    // Black keys span the top fraction of the view height.
    private static final double BLACK_KEY_HEIGHT_FACTOR = 0.60;
    // Black key width as a fraction of the white key width.
    private static final double BLACK_KEY_WIDTH_FACTOR = 0.6;
    // Scale for the per-key horizontal nudge taken from the table below.
    private static final double BLACK_KEY_OFFSET_FACTOR = 0.18;
    // Nudge direction for the five black keys of an octave (C#, D#, F#, G#, A#),
    // indexed modulo 5 in makeBlackRectangles().
    private static final int[] BLACK_KEY_HORIZONTAL_OFFSETS = {
            -1, 1, -1, 0, 1
    };
    // For each of the 12 notes of an octave (starting at C): is it a black key?
    private static final boolean[] NOTE_IN_OCTAVE_IS_BLACK = {
            false, true,
            false, true,
            false, false, true,
            false, true,
            false, true,
            false
    };
    // Preferences
    private int mNumKeys;                                  // keys shown for the current orientation
    private int mNumPortraitKeys = NOTES_PER_OCTAVE + 1;   // total keys in portrait
    private int mNumLandscapeKeys = (2 * NOTES_PER_OCTAVE) + 1; // total keys in landscape
    private int mNumWhiteKeys = 15;                        // recounted in onSizeChanged()
    // Geometry.
    private int mWidth;        // view width in pixels
    private int mHeight;       // view height in pixels
    private int mWhiteKeyWidth;
    private double mBlackKeyWidth;
    // Y position of bottom of black keys.
    private int mBlackBottom;
    // One cached rectangle per visible black key, in left-to-right order.
    private Rect[] mBlackKeyRectangles;
    // Keyboard state
    // Highlight flag per MIDI pitch (0-127); true while that note is down.
    private boolean[] mNotesOnByPitch = new boolean[128];
    // Appearance
    private Paint mShadowPaint;      // background visible in the gaps between keys
    private Paint mBlackOnKeyPaint;  // pressed black key
    private Paint mBlackOffKeyPaint; // idle black key
    private Paint mWhiteOnKeyPaint;  // pressed white key
    private Paint mWhiteOffKeyPaint; // idle white key
    // When true, a finger sliding to a new key fires the new key-down BEFORE
    // the old key-up (see onFingerMove()), so notes can overlap.
    private boolean mLegato = true;
    // Maps pointer id -> pitch currently held by that finger.
    private HashMap<Integer, Integer> mFingerMap = new HashMap<Integer, Integer>();
    // Note number for the left most key.
    private int mLowestPitch = PITCH_MIDDLE_C - NOTES_PER_OCTAVE;
    private ArrayList<MusicKeyListener> mListeners = new ArrayList<MusicKeyListener>();

    /** Implement this to receive keyboard events. */
    public interface MusicKeyListener {
        /** This will be called when a key is pressed. */
        public void onKeyDown(int keyIndex);

        /** This will be called when a key is released. */
        public void onKeyUp(int keyIndex);
    }

    public MusicKeyboardView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    // Create the Paint objects used by onDraw(). Colors are ARGB literals.
    void init() {
        mShadowPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mShadowPaint.setStyle(Paint.Style.FILL);
        mShadowPaint.setColor(0xFF707070);
        mBlackOnKeyPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mBlackOnKeyPaint.setStyle(Paint.Style.FILL);
        mBlackOnKeyPaint.setColor(0xFF2020E0);
        mBlackOffKeyPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mBlackOffKeyPaint.setStyle(Paint.Style.FILL);
        mBlackOffKeyPaint.setColor(0xFF202020);
        mWhiteOnKeyPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mWhiteOnKeyPaint.setStyle(Paint.Style.FILL);
        mWhiteOnKeyPaint.setColor(0xFF6060F0);
        mWhiteOffKeyPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mWhiteOffKeyPaint.setStyle(Paint.Style.FILL);
        mWhiteOffKeyPaint.setColor(0xFFF0F0F0);
    }

    /**
     * Recompute key count and geometry whenever the view is resized.
     * Taller-than-wide is treated as portrait.
     */
    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        mWidth = w;
        mHeight = h;
        mNumKeys = (mHeight > mWidth) ? mNumPortraitKeys : mNumLandscapeKeys;
        mNumWhiteKeys = 0;
        // Count white keys.
        for (int i = 0; i < mNumKeys; i++) {
            int pitch = mLowestPitch + i;
            if (!isPitchBlack(pitch)) {
                mNumWhiteKeys++;
            }
        }
        mWhiteKeyWidth = mWidth / mNumWhiteKeys;
        mBlackKeyWidth = mWhiteKeyWidth * BLACK_KEY_WIDTH_FACTOR;
        mBlackBottom = (int) (mHeight * BLACK_KEY_HEIGHT_FACTOR);
        makeBlackRectangles();
    }

    /**
     * Precompute one Rect per visible black key. Each rectangle is placed
     * relative to the white key to its right, shifted left by a fraction of
     * the black key width and nudged per BLACK_KEY_HORIZONTAL_OFFSETS.
     */
    private void makeBlackRectangles() {
        int top = 0;
        ArrayList<Rect> rectangles = new ArrayList<Rect>();
        int whiteKeyIndex = 0;
        int blackKeyIndex = 0;
        for (int i = 0; i < mNumKeys; i++) {
            int x = mWhiteKeyWidth * whiteKeyIndex;
            int pitch = mLowestPitch + i;
            int note = pitch % NOTES_PER_OCTAVE;
            if (NOTE_IN_OCTAVE_IS_BLACK[note]) {
                double offset = BLACK_KEY_OFFSET_FACTOR
                        * BLACK_KEY_HORIZONTAL_OFFSETS[blackKeyIndex % 5];
                // NOTE(review): the literal 0.6 duplicates BLACK_KEY_WIDTH_FACTOR;
                // likely meant to reference the constant — confirm before changing.
                int left = (int) (x - mBlackKeyWidth * (0.6 - offset));
                left += WHITE_KEY_GAP / 2;
                int right = (int) (left + mBlackKeyWidth);
                Rect rect = new Rect(left, top, right, mBlackBottom);
                rectangles.add(rect);
                blackKeyIndex++;
            } else {
                whiteKeyIndex++;
            }
        }
        mBlackKeyRectangles = rectangles.toArray(new Rect[0]);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        int whiteKeyIndex = 0;
        // Fill the whole view with the shadow color; it shows through the gaps.
        canvas.drawRect(0, 0, mWidth, mHeight, mShadowPaint);
        // Draw white keys first.
        for (int i = 0; i < mNumKeys; i++) {
            int pitch = mLowestPitch + i;
            int note = pitch % NOTES_PER_OCTAVE;
            if (!NOTE_IN_OCTAVE_IS_BLACK[note]) {
                int x = (mWhiteKeyWidth * whiteKeyIndex) + (WHITE_KEY_GAP / 2);
                Paint paint = mNotesOnByPitch[pitch] ? mWhiteOnKeyPaint
                        : mWhiteOffKeyPaint;
                canvas.drawRect(x, 0, x + mWhiteKeyWidth - WHITE_KEY_GAP, mHeight,
                        paint);
                whiteKeyIndex++;
            }
        }
        // Then draw black keys over the white keys.
        int blackKeyIndex = 0;
        for (int i = 0; i < mNumKeys; i++) {
            int pitch = mLowestPitch + i;
            int note = pitch % NOTES_PER_OCTAVE;
            if (NOTE_IN_OCTAVE_IS_BLACK[note]) {
                Rect r = mBlackKeyRectangles[blackKeyIndex];
                Paint paint = mNotesOnByPitch[pitch] ? mBlackOnKeyPaint
                        : mBlackOffKeyPaint;
                canvas.drawRect(r, paint);
                blackKeyIndex++;
            }
        }
    }

    /**
     * Dispatch multi-touch events to per-finger down/move/up handlers.
     * NOTE(review): for ACTION_MOVE only the pointer at getActionIndex() is
     * handled; other fingers moving in the same batched event are ignored
     * until their next event — confirm this is intended.
     */
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        super.onTouchEvent(event);
        int action = event.getActionMasked();
        // Track individual fingers.
        int pointerIndex = event.getActionIndex();
        int id = event.getPointerId(pointerIndex);
        // Get the pointer's current position
        float x = event.getX(pointerIndex);
        float y = event.getY(pointerIndex);
        switch (action) {
            case MotionEvent.ACTION_DOWN:
            case MotionEvent.ACTION_POINTER_DOWN:
                onFingerDown(id, x, y);
                break;
            case MotionEvent.ACTION_MOVE:
                onFingerMove(id, x, y);
                break;
            case MotionEvent.ACTION_UP:
            case MotionEvent.ACTION_POINTER_UP:
                onFingerUp(id, x, y);
                break;
        }
        // Must return true or we do not get the ACTION_MOVE and
        // ACTION_UP events.
        return true;
    }

    // A new finger touched: sound the key under it and remember the mapping.
    private void onFingerDown(int id, float x, float y) {
        int pitch = xyToPitch(x, y);
        fireKeyDown(pitch);
        mFingerMap.put(id, pitch);
    }

    // A tracked finger moved: above mBlackBottom only black keys can be hit,
    // below it only white keys — this is what lets a glissando along the
    // bottom edge skip the black keys.
    private void onFingerMove(int id, float x, float y) {
        Integer previousPitch = mFingerMap.get(id);
        if (previousPitch != null) {
            int pitch = -1;
            if (y < mBlackBottom) {
                // Only hit black keys if above line.
                pitch = xyToBlackPitch(x, y);
            } else {
                pitch = xToWhitePitch(x);
            }
            // Did we change to a new key.
            if ((pitch >= 0) && (pitch != previousPitch)) {
                if (mLegato) {
                    // Overlap: new note starts before the old one stops.
                    fireKeyDown(pitch);
                    fireKeyUp(previousPitch);
                } else {
                    fireKeyUp(previousPitch);
                    fireKeyDown(pitch);
                }
                mFingerMap.put(id, pitch);
            }
        }
    }

    // A finger lifted: release its tracked pitch, or if the finger was never
    // tracked, release whatever key is under the lift position.
    private void onFingerUp(int id, float x, float y) {
        Integer previousPitch = mFingerMap.get(id);
        if (previousPitch != null) {
            fireKeyUp(previousPitch);
            mFingerMap.remove(id);
        } else {
            int pitch = xyToPitch(x, y);
            fireKeyUp(pitch);
        }
    }

    // Notify listeners of a key-down, mark the note lit, and redraw.
    private void fireKeyDown(int pitch) {
        for (MusicKeyListener listener : mListeners) {
            listener.onKeyDown(pitch);
        }
        mNotesOnByPitch[pitch] = true;
        invalidate();
    }

    // Notify listeners of a key-up, clear the highlight, and redraw.
    private void fireKeyUp(int pitch) {
        for (MusicKeyListener listener : mListeners) {
            listener.onKeyUp(pitch);
        }
        mNotesOnByPitch[pitch] = false;
        invalidate();
    }

    // Map a touch point to a MIDI pitch: try black keys first (only possible
    // above mBlackBottom), then fall back to the white key at that x.
    private int xyToPitch(float x, float y) {
        int pitch = -1;
        if (y < mBlackBottom) {
            pitch = xyToBlackPitch(x, y);
        }
        if (pitch < 0) {
            pitch = xToWhitePitch(x);
        }
        return pitch;
    }

    private boolean isPitchBlack(int pitch) {
        int note = pitch % NOTES_PER_OCTAVE;
        return NOTE_IN_OCTAVE_IS_BLACK[note];
    }

    // Convert x to MIDI pitch. Ignores black keys.
    // NOTE(review): the octave arithmetic measures WHITE_KEY_OFFSETS from
    // mLowestPitch, which assumes mLowestPitch is a C (the default is);
    // setLowestPitch() only forces a white key, not a C — confirm.
    private int xToWhitePitch(float x) {
        int whiteKeyIndex = (int) (x / mWhiteKeyWidth);
        int octave = whiteKeyIndex / WHITE_KEY_OFFSETS.length;
        int indexInOctave = whiteKeyIndex - (octave * WHITE_KEY_OFFSETS.length);
        int pitch = mLowestPitch + (octave * NOTES_PER_OCTAVE) +
                WHITE_KEY_OFFSETS[indexInOctave];
        return pitch;
    }

    // Convert x to MIDI pitch. Ignores white keys.
    // Linear scan over the cached black-key rectangles; returns -1 on miss.
    private int xyToBlackPitch(float x, float y) {
        int result = -1;
        int blackKeyIndex = 0;
        for (int i = 0; i < mNumKeys; i++) {
            int pitch = mLowestPitch + i;
            if (isPitchBlack(pitch)) {
                Rect rect = mBlackKeyRectangles[blackKeyIndex];
                if (rect.contains((int) x, (int) y)) {
                    result = pitch;
                    break;
                }
                blackKeyIndex++;
            }
        }
        return result;
    }

    /** Register a listener for key-down/key-up events. */
    public void addMusicKeyListener(MusicKeyListener musicKeyListener) {
        mListeners.add(musicKeyListener);
    }

    /** Remove a previously registered listener. */
    public void removeMusicKeyListener(MusicKeyListener musicKeyListener) {
        mListeners.remove(musicKeyListener);
    }

    /**
     * Set the pitch of the lowest, leftmost key. If you set it to a black key then it will get
     * adjusted upwards to a white key. Forces a redraw.
     */
    public void setLowestPitch(int pitch) {
        if (isPitchBlack(pitch)) {
            pitch++; // force to next white key
        }
        mLowestPitch = pitch;
        postInvalidate();
    }

    public int getLowestPitch() {
        return mLowestPitch;
    }

    /**
     * Set the number of keys shown in portrait mode. (Counts every key,
     * black and white — see onSizeChanged().)
     */
    public void setNumPortraitKeys(int numPortraitKeys) {
        mNumPortraitKeys = numPortraitKeys;
        postInvalidate();
    }

    public int getNumPortraitKeys() {
        return mNumPortraitKeys;
    }

    /**
     * Set the number of keys shown in landscape mode. (Counts every key,
     * black and white — see onSizeChanged().)
     */
    public void setNumLandscapeKeys(int numLandscapeKeys) {
        mNumLandscapeKeys = numLandscapeKeys;
        postInvalidate();
    }

    public int getNumLandscapeKeys() {
        return mNumLandscapeKeys;
    }
}

View File

@ -0,0 +1,235 @@
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools.synth;
import android.media.AudioAttributes;
import android.media.AudioTrack;
import android.util.Log;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
* Optimize the buffer size for an AudioTrack based on the underrun count.
* <p/>
* This feature was added in N. So we check for the methods using reflection.
* If you are targeting N or later then you could just call the new methods directly.
*/
public class AudioLatencyTuner {
    private static final String TAG = "AudioLatencyTuner";
    // Tuning state machine, advanced by update():
    private static final int STATE_PRIMING = 0;   // wait for playback to actually start
    private static final int STATE_LOWERING = 1;  // shrink buffer until an underrun occurs
    private static final int STATE_RAISING = 2;   // grow buffer while underruns continue
    private static boolean mLowLatencySupported; // N or later?
    // These are found using reflection.
    private static int mFlagLowLatency; // AudioAttributes.FLAG_LOW_LATENCY
    private static Method mSetBufferSizeMethod = null;     // AudioTrack.setBufferSizeInFrames
    private static Method mGetBufferCapacityMethod = null; // AudioTrack.getBufferCapacity
    private static Method mGetUnderrunCountMethod = null;  // AudioTrack.getXRunCount
    private final int mInitialSize;    // buffer size at construction; reset/fallback value
    private final AudioTrack mAudioTrack;
    private final int mFramesPerBlock; // granularity for buffer size changes
    private int mState = STATE_PRIMING;
    private int mPreviousUnderrunCount;

    static {
        // Resolve the optional N-and-later APIs once, at class load time.
        reflectAdvancedMethods();
    }

    /**
     * @param track          the AudioTrack whose buffer size will be tuned
     * @param framesPerBlock number of frames written per write() block
     */
    public AudioLatencyTuner(AudioTrack track, int framesPerBlock) {
        mAudioTrack = track;
        mInitialSize = track.getBufferSizeInFrames();
        mFramesPerBlock = framesPerBlock;
        reset();
    }

    /**
     * Use Java reflection to find the methods added in the N release.
     * Sets mLowLatencySupported based on whether FLAG_LOW_LATENCY exists;
     * each Method field stays null if the corresponding API is absent.
     */
    private static void reflectAdvancedMethods() {
        try {
            Field field = AudioAttributes.class.getField("FLAG_LOW_LATENCY");
            mFlagLowLatency = field.getInt(AudioAttributes.class);
            mLowLatencySupported = true;
        } catch (NoSuchFieldException e) {
            mLowLatencySupported = false;
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        }
        Method[] methods = AudioTrack.class.getMethods();
        for (Method method : methods) {
            if (method.getName().equals("setBufferSizeInFrames")) {
                mSetBufferSizeMethod = method;
                break;
            }
        }
        for (Method method : methods) {
            if (method.getName().equals("getBufferCapacity")) {
                mGetBufferCapacityMethod = method;
                break;
            }
        }
        for (Method method : methods) {
            if (method.getName().equals("getXRunCount")) {
                mGetUnderrunCountMethod = method;
                break;
            }
        }
    }

    /**
     * @return number of times the audio buffer underflowed and glitched.
     * Returns 0 when the getXRunCount API is unavailable or the call fails.
     */
    public int getUnderrunCount() {
        // Call using reflection.
        if (mGetUnderrunCountMethod != null && mAudioTrack != null) {
            try {
                Object result = mGetUnderrunCountMethod.invoke(mAudioTrack);
                int count = ((Integer) result).intValue();
                return count;
            } catch (IllegalAccessException e) {
                e.printStackTrace();
            } catch (InvocationTargetException e) {
                e.printStackTrace();
            }
        }
        return 0;
    }

    /**
     * @return allocated size of the buffer
     * Falls back to the construction-time size when the API is unavailable.
     */
    public int getBufferCapacityInFrames() {
        if (mGetBufferCapacityMethod != null) {
            try {
                Object result = mGetBufferCapacityMethod.invoke(mAudioTrack);
                int size = ((Integer) result).intValue();
                return size;
            } catch (IllegalAccessException e) {
                e.printStackTrace();
            } catch (InvocationTargetException e) {
                e.printStackTrace();
            }
        }
        return mInitialSize;
    }

    /**
     * Set the amount of the buffer capacity that we want to use.
     * Lower values will reduce latency but may cause glitches.
     * Note that you may not get the size you asked for.
     *
     * @return actual size of the buffer
     */
    public int setBufferSizeInFrames(int thresholdFrames) {
        if (mSetBufferSizeMethod != null) {
            try {
                Object result = mSetBufferSizeMethod.invoke(mAudioTrack, thresholdFrames);
                int actual = ((Integer) result).intValue();
                return actual;
            } catch (IllegalAccessException e) {
                e.printStackTrace();
            } catch (InvocationTargetException e) {
                e.printStackTrace();
            }
        }
        return mInitialSize;
    }

    public int getBufferSizeInFrames() {
        return mAudioTrack.getBufferSizeInFrames();
    }

    /** @return true when running on a release that exposes FLAG_LOW_LATENCY. */
    public static boolean isLowLatencySupported() {
        return mLowLatencySupported;
    }

    /** @return reflected value of AudioAttributes.FLAG_LOW_LATENCY (0 if unsupported). */
    public static int getLowLatencyFlag() {
        return mFlagLowLatency;
    }

    /** Restart tuning from scratch: full initial buffer size, PRIMING state. */
    public void reset() {
        mState = STATE_PRIMING;
        mPreviousUnderrunCount = 0;
        setBufferSizeInFrames(mInitialSize);
    }

    /**
     * This should be called after every write().
     * It will lower the latency until there are underruns.
     * Then it raises the latency until the underruns stop.
     */
    public void update() {
        if (!mLowLatencySupported) {
            return; // nothing to tune before N
        }
        int nextState = mState;
        int underrunCount;
        switch (mState) {
            case STATE_PRIMING:
                // Wait until at least 8 blocks have actually played before
                // trusting the underrun counter.
                if (mAudioTrack.getPlaybackHeadPosition() > (8 * mFramesPerBlock)) {
                    nextState = STATE_LOWERING;
                    mPreviousUnderrunCount = getUnderrunCount();
                }
                break;
            case STATE_LOWERING:
                underrunCount = getUnderrunCount();
                if (underrunCount > mPreviousUnderrunCount) {
                    // Went too low: start raising the buffer back up.
                    nextState = STATE_RAISING;
                } else {
                    if (incrementThreshold(-1)) {
                        // If we hit bottom then start raising it back up.
                        nextState = STATE_RAISING;
                    }
                }
                mPreviousUnderrunCount = underrunCount;
                break;
            case STATE_RAISING:
                underrunCount = getUnderrunCount();
                if (underrunCount > mPreviousUnderrunCount) {
                    // Still glitching: add another block of buffer.
                    incrementThreshold(1);
                }
                mPreviousUnderrunCount = underrunCount;
                break;
        }
        mState = nextState;
    }

    /**
     * Raise or lower the buffer size in blocks.
     * @return true if the size did not change (i.e. a limit was reached)
     */
    private boolean incrementThreshold(int deltaBlocks) {
        int original = getBufferSizeInFrames();
        int numBlocks = original / mFramesPerBlock;
        numBlocks += deltaBlocks;
        int target = numBlocks * mFramesPerBlock;
        int actual = setBufferSizeInFrames(target);
        Log.i(TAG, "Buffer size changed from " + original + " to " + actual);
        return actual == original;
    }
}

View File

@ -0,0 +1,115 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mobileer.miditools.synth;
/**
* Very simple Attack, Decay, Sustain, Release envelope with linear ramps.
*
* Times are in seconds.
*/
/**
 * Linear-segment ADSR amplitude envelope.
 *
 * The envelope advances one sample per render() call: on() starts the attack
 * ramp toward 1.0, which then decays down to the sustain level and holds
 * there until off() starts the release ramp down to 0.0. Segment times are
 * given in seconds and converted to per-sample increments.
 */
public class EnvelopeADSR extends SynthUnit {
    // Envelope stages.
    private static final int IDLE = 0;
    private static final int ATTACK = 1;
    private static final int DECAY = 2;
    private static final int SUSTAIN = 3;
    private static final int RELEASE = 4;
    private static final int FINISHED = 5;
    // Shortest permitted segment time, in seconds; prevents divide-by-zero rates.
    private static final float MIN_TIME = 0.001f;

    private float mAttackRate;   // level increase per sample during ATTACK
    private float mDecayRate;    // level decrease per sample during DECAY
    private float mReleaseRate;  // level decrease per sample during RELEASE
    private float mSustainLevel; // level held while in SUSTAIN
    private float mLevel;        // current envelope output
    private int mStage = IDLE;
    private final int mSampleRate;

    /**
     * @param sampleRate frames per second, used to convert times to rates
     */
    public EnvelopeADSR(int sampleRate) {
        mSampleRate = sampleRate;
        // Defaults: fast attack, short decay, quiet sustain, long release.
        setAttackTime(0.003f);
        setDecayTime(0.08f);
        setSustainLevel(0.3f);
        setReleaseTime(1.0f);
    }

    /** Set attack time in seconds; values below MIN_TIME are clamped up. */
    public void setAttackTime(float time) {
        mAttackRate = 1.0f / (mSampleRate * Math.max(time, MIN_TIME));
    }

    /** Set decay time in seconds; values below MIN_TIME are clamped up. */
    public void setDecayTime(float time) {
        mDecayRate = 1.0f / (mSampleRate * Math.max(time, MIN_TIME));
    }

    /** Set sustain level; negative values are clamped to zero. */
    public void setSustainLevel(float level) {
        mSustainLevel = Math.max(level, 0.0f);
    }

    /** Set release time in seconds; values below MIN_TIME are clamped up. */
    public void setReleaseTime(float time) {
        mReleaseRate = 1.0f / (mSampleRate * Math.max(time, MIN_TIME));
    }

    /** Note-on: begin the attack ramp from the current level. */
    public void on() {
        mStage = ATTACK;
    }

    /** Note-off: begin the release ramp from the current level. */
    public void off() {
        mStage = RELEASE;
    }

    /**
     * Advance the envelope by one sample and return the new level.
     * IDLE, SUSTAIN and FINISHED stages simply hold the current level.
     */
    @Override
    public float render() {
        if (mStage == ATTACK) {
            mLevel += mAttackRate;
            if (mLevel > 1.0f) {
                mLevel = 1.0f;
                mStage = DECAY;
            }
        } else if (mStage == DECAY) {
            mLevel -= mDecayRate;
            if (mLevel < mSustainLevel) {
                mLevel = mSustainLevel;
                mStage = SUSTAIN;
            }
        } else if (mStage == RELEASE) {
            mLevel -= mReleaseRate;
            if (mLevel < 0.0f) {
                mLevel = 0.0f;
                mStage = FINISHED;
            }
        }
        return mLevel;
    }

    /** @return true once the release ramp has reached zero. */
    public boolean isDone() {
        return mStage == FINISHED;
    }
}

Some files were not shown because too many files have changed in this diff Show More