Merge "Introduce ApiHelper"
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 3c9bfdd..e7dc0fe 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -319,6 +319,7 @@
srcs: [
"AudioParameter.cpp",
+ "JAudioTrack.cpp",
"MediaPlayer2Factory.cpp",
"MediaPlayer2Manager.cpp",
"TestPlayerStub.cpp",
@@ -327,6 +328,7 @@
],
shared_libs: [
+ "libandroid_runtime",
"libaudioclient",
"libbinder",
"libcutils",
diff --git a/media/libmedia/JAudioTrack.cpp b/media/libmedia/JAudioTrack.cpp
new file mode 100644
index 0000000..b228d8b
--- /dev/null
+++ b/media/libmedia/JAudioTrack.cpp
@@ -0,0 +1,520 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "JAudioTrack"
+
+#include "media/JAudioAttributes.h"
+#include "media/JAudioFormat.h"
+#include "media/JAudioTrack.h"
+
+#include <android_media_AudioErrors.h>
+#include <android_runtime/AndroidRuntime.h>
+
+namespace android {
+
+// TODO: Store Java class/methodID as a member variable in the class.
+// TODO: Add NULL && Exception checks after every JNI call.
+// Creates the Java android.media.AudioTrack peer through AudioTrack.Builder and
+// caches a global reference to the AudioTrack class for later method lookups.
+JAudioTrack::JAudioTrack( // < Usages of the arguments are below >
+        audio_stream_type_t streamType,        // AudioAudioAttributes
+        uint32_t sampleRate,                   // AudioFormat && bufferSizeInBytes
+        audio_format_t format,                 // AudioFormat && bufferSizeInBytes
+        audio_channel_mask_t channelMask,      // AudioFormat && bufferSizeInBytes
+        size_t frameCount,                     // bufferSizeInBytes
+        audio_session_t sessionId,             // AudioTrack
+        const audio_attributes_t* pAttributes, // AudioAttributes
+        float maxRequiredSpeed) {              // bufferSizeInBytes
+
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jclass jAudioTrackCls = env->FindClass("android/media/AudioTrack");
+    mAudioTrackCls = (jclass) env->NewGlobalRef(jAudioTrackCls);
+
+    // Clamp the speed factor to [1.0, AUDIO_TIMESTRETCH_SPEED_MAX] so the buffer
+    // is never sized smaller than for normal-speed playback.
+    maxRequiredSpeed = std::min(std::max(maxRequiredSpeed, 1.0f), AUDIO_TIMESTRETCH_SPEED_MAX);
+
+    int bufferSizeInBytes = 0;
+    if (sampleRate == 0 || frameCount > 0) {
+        // Manually calculate buffer size.
+        bufferSizeInBytes = audio_channel_count_from_out_mask(channelMask)
+                * audio_bytes_per_sample(format) * (frameCount > 0 ? frameCount : 1);
+    } else if (sampleRate > 0) {
+        // Call Java AudioTrack::getMinBufferSize().
+        jmethodID jGetMinBufferSize =
+                env->GetStaticMethodID(mAudioTrackCls, "getMinBufferSize", "(III)I");
+        bufferSizeInBytes = env->CallStaticIntMethod(mAudioTrackCls, jGetMinBufferSize,
+                sampleRate, outChannelMaskFromNative(channelMask), audioFormatFromNative(format));
+    }
+    // Scale the buffer so time-stretched (faster) playback does not underrun.
+    bufferSizeInBytes = (int) (bufferSizeInBytes * maxRequiredSpeed);
+
+    // Create a Java AudioTrack object through its Builder.
+    jclass jBuilderCls = env->FindClass("android/media/AudioTrack$Builder");
+    jmethodID jBuilderCtor = env->GetMethodID(jBuilderCls, "<init>", "()V");
+    jobject jBuilderObj = env->NewObject(jBuilderCls, jBuilderCtor);
+
+    jmethodID jSetAudioAttributes = env->GetMethodID(jBuilderCls, "setAudioAttributes",
+            "(Landroid/media/AudioAttributes;)Landroid/media/AudioTrack$Builder;");
+    jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetAudioAttributes,
+            JAudioAttributes::createAudioAttributesObj(env, pAttributes, streamType));
+
+    jmethodID jSetAudioFormat = env->GetMethodID(jBuilderCls, "setAudioFormat",
+            "(Landroid/media/AudioFormat;)Landroid/media/AudioTrack$Builder;");
+    jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetAudioFormat,
+            JAudioFormat::createAudioFormatObj(env, sampleRate, format, channelMask));
+
+    jmethodID jSetBufferSizeInBytes = env->GetMethodID(jBuilderCls, "setBufferSizeInBytes",
+            "(I)Landroid/media/AudioTrack$Builder;");
+    jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetBufferSizeInBytes, bufferSizeInBytes);
+
+    // We only use streaming mode of Java AudioTrack.
+    jfieldID jModeStream = env->GetStaticFieldID(mAudioTrackCls, "MODE_STREAM", "I");
+    jint transferMode = env->GetStaticIntField(mAudioTrackCls, jModeStream);
+    jmethodID jSetTransferMode = env->GetMethodID(jBuilderCls, "setTransferMode",
+            "(I)Landroid/media/AudioTrack$Builder;");
+    jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetTransferMode,
+            transferMode /* Java AudioTrack::MODE_STREAM */);
+
+    if (sessionId != 0) {
+        jmethodID jSetSessionId = env->GetMethodID(jBuilderCls, "setSessionId",
+                "(I)Landroid/media/AudioTrack$Builder;");
+        jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetSessionId, sessionId);
+    }
+
+    jmethodID jBuild = env->GetMethodID(jBuilderCls, "build", "()Landroid/media/AudioTrack;");
+    // NOTE(review): CallObjectMethod returns a JNI local reference; mAudioTrackObj
+    // outlives this native frame — presumably it needs NewGlobalRef. TODO: confirm.
+    mAudioTrackObj = env->CallObjectMethod(jBuilderObj, jBuild);
+}
+
+// Releases the global class reference taken in the constructor.
+// NOTE(review): mAudioTrackObj is not released here — verify its reference
+// kind/lifetime against the constructor. TODO: confirm.
+JAudioTrack::~JAudioTrack() {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    env->DeleteGlobalRef(mAudioTrackCls);
+}
+
+// Returns the track buffer capacity reported by Java AudioTrack.getBufferSizeInFrames().
+size_t JAudioTrack::frameCount() {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jGetBufferSizeInFrames = env->GetMethodID(
+            mAudioTrackCls, "getBufferSizeInFrames", "()I");
+    return env->CallIntMethod(mAudioTrackObj, jGetBufferSizeInFrames);
+}
+
+// Returns the channel count reported by Java AudioTrack.getChannelCount().
+size_t JAudioTrack::channelCount() {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jGetChannelCount = env->GetMethodID(mAudioTrackCls, "getChannelCount", "()I");
+    return env->CallIntMethod(mAudioTrackObj, jGetChannelCount);
+}
+
+// Fills *position with Java AudioTrack.getPlaybackHeadPosition().
+// Returns BAD_VALUE if position is NULL, NO_ERROR otherwise.
+status_t JAudioTrack::getPosition(uint32_t *position) {
+    if (position == NULL) {
+        return BAD_VALUE;
+    }
+
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jGetPlaybackHeadPosition = env->GetMethodID(
+            mAudioTrackCls, "getPlaybackHeadPosition", "()I");
+    *position = env->CallIntMethod(mAudioTrackObj, jGetPlaybackHeadPosition);
+
+    return NO_ERROR;
+}
+
+// Queries Java AudioTrack.getTimestamp() and converts the result into the
+// native AudioTimestamp (frame position + CLOCK time split into sec/nsec).
+// Returns false if the Java call reports no timestamp is available.
+bool JAudioTrack::getTimeStamp(AudioTimestamp& timestamp) {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+    jclass jAudioTimeStampCls = env->FindClass("android/media/AudioTimestamp");
+    jobject jAudioTimeStampObj = env->AllocObject(jAudioTimeStampCls);
+
+    // The JNI descriptor for a Java long field is "J" ("L..." denotes an object type).
+    jfieldID jFramePosition = env->GetFieldID(jAudioTimeStampCls, "framePosition", "J");
+    jfieldID jNanoTime = env->GetFieldID(jAudioTimeStampCls, "nanoTime", "J");
+
+    // boolean getTimestamp(AudioTimestamp): an object parameter descriptor needs a
+    // trailing ';' and a Java boolean return is "Z" ("B" would be byte).
+    jmethodID jGetTimestamp = env->GetMethodID(mAudioTrackCls,
+            "getTimestamp", "(Landroid/media/AudioTimestamp;)Z");
+    bool success = env->CallBooleanMethod(mAudioTrackObj, jGetTimestamp, jAudioTimeStampObj);
+
+    if (!success) {
+        return false;
+    }
+
+    long long framePosition = env->GetLongField(jAudioTimeStampObj, jFramePosition);
+    long long nanoTime = env->GetLongField(jAudioTimeStampObj, jNanoTime);
+
+    struct timespec ts;
+    const long long secondToNano = 1000000000LL; // 1E9
+    ts.tv_sec = nanoTime / secondToNano;
+    ts.tv_nsec = nanoTime % secondToNano;
+    timestamp.mTime = ts;
+    timestamp.mPosition = (uint32_t) framePosition;
+
+    return true;
+}
+
+// Applies the native AudioPlaybackRate by building a Java PlaybackParams
+// (fallback mode, stretch mode, pitch, speed) and passing it to
+// Java AudioTrack.setPlaybackParams(). Always returns NO_ERROR.
+status_t JAudioTrack::setPlaybackRate(const AudioPlaybackRate &playbackRate) {
+    // TODO: existing native AudioTrack returns INVALID_OPERATION on offload/direct/fast tracks.
+    // Should we do the same thing?
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+    jclass jPlaybackParamsCls = env->FindClass("android/media/PlaybackParams");
+    jmethodID jPlaybackParamsCtor = env->GetMethodID(jPlaybackParamsCls, "<init>", "()V");
+    jobject jPlaybackParamsObj = env->NewObject(jPlaybackParamsCls, jPlaybackParamsCtor);
+
+    jmethodID jSetAudioFallbackMode = env->GetMethodID(
+            jPlaybackParamsCls, "setAudioFallbackMode", "(I)Landroid/media/PlaybackParams;");
+    jPlaybackParamsObj = env->CallObjectMethod(
+            jPlaybackParamsObj, jSetAudioFallbackMode, playbackRate.mFallbackMode);
+
+    jmethodID jSetAudioStretchMode = env->GetMethodID(
+                jPlaybackParamsCls, "setAudioStretchMode", "(I)Landroid/media/PlaybackParams;");
+    jPlaybackParamsObj = env->CallObjectMethod(
+            jPlaybackParamsObj, jSetAudioStretchMode, playbackRate.mStretchMode);
+
+    jmethodID jSetPitch = env->GetMethodID(
+            jPlaybackParamsCls, "setPitch", "(F)Landroid/media/PlaybackParams;");
+    jPlaybackParamsObj = env->CallObjectMethod(jPlaybackParamsObj, jSetPitch, playbackRate.mPitch);
+
+    jmethodID jSetSpeed = env->GetMethodID(
+            jPlaybackParamsCls, "setSpeed", "(F)Landroid/media/PlaybackParams;");
+    jPlaybackParamsObj = env->CallObjectMethod(jPlaybackParamsObj, jSetSpeed, playbackRate.mSpeed);
+
+
+    // Set this Java PlaybackParams object into Java AudioTrack.
+    jmethodID jSetPlaybackParams = env->GetMethodID(
+            mAudioTrackCls, "setPlaybackParams", "(Landroid/media/PlaybackParams;)V");
+    env->CallVoidMethod(mAudioTrackObj, jSetPlaybackParams, jPlaybackParamsObj);
+    // TODO: Should we catch the Java IllegalArgumentException?
+
+    return NO_ERROR;
+}
+
+// Reads the Java AudioTrack's current PlaybackParams back into a native
+// AudioPlaybackRate (fallback mode, stretch mode, pitch, speed).
+const AudioPlaybackRate JAudioTrack::getPlaybackRate() {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+    jmethodID jGetPlaybackParams = env->GetMethodID(
+            mAudioTrackCls, "getPlaybackParams", "()Landroid/media/PlaybackParams;");
+    jobject jPlaybackParamsObj = env->CallObjectMethod(mAudioTrackObj, jGetPlaybackParams);
+
+    AudioPlaybackRate playbackRate;
+    jclass jPlaybackParamsCls = env->FindClass("android/media/PlaybackParams");
+
+    jmethodID jGetAudioFallbackMode = env->GetMethodID(
+            jPlaybackParamsCls, "getAudioFallbackMode", "()I");
+    // TODO: Should we enable passing AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT?
+    //       The enum is internal only, so it is not defined in PlaybackParmas.java.
+    // TODO: Is this right way to convert an int to an enum?
+    playbackRate.mFallbackMode = static_cast<AudioTimestretchFallbackMode>(
+            env->CallIntMethod(jPlaybackParamsObj, jGetAudioFallbackMode));
+
+    jmethodID jGetAudioStretchMode = env->GetMethodID(
+            jPlaybackParamsCls, "getAudioStretchMode", "()I");
+    playbackRate.mStretchMode = static_cast<AudioTimestretchStretchMode>(
+            env->CallIntMethod(jPlaybackParamsObj, jGetAudioStretchMode));
+
+    jmethodID jGetPitch = env->GetMethodID(jPlaybackParamsCls, "getPitch", "()F");
+    playbackRate.mPitch = env->CallFloatMethod(jPlaybackParamsObj, jGetPitch);
+
+    jmethodID jGetSpeed = env->GetMethodID(jPlaybackParamsCls, "getSpeed", "()F");
+    playbackRate.mSpeed = env->CallFloatMethod(jPlaybackParamsObj, jGetSpeed);
+
+    return playbackRate;
+}
+
+// Creates a Java VolumeShaper from `configuration` on the Java AudioTrack and
+// applies `operation` to it. Returns BAD_VALUE if either native object could
+// not be converted to its Java counterpart.
+// NOTE(review): the Java VolumeShaper object is not retained after this call —
+// verify whether it must be kept alive for the shaper to keep running.
+media::VolumeShaper::Status JAudioTrack::applyVolumeShaper(
+        const sp<media::VolumeShaper::Configuration>& configuration,
+        const sp<media::VolumeShaper::Operation>& operation) {
+
+    jobject jConfigurationObj = createVolumeShaperConfigurationObj(configuration);
+    jobject jOperationObj = createVolumeShaperOperationObj(operation);
+
+    if (jConfigurationObj == NULL || jOperationObj == NULL) {
+        return media::VolumeShaper::Status(BAD_VALUE);
+    }
+
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+    jmethodID jCreateVolumeShaper = env->GetMethodID(mAudioTrackCls, "createVolumeShaper",
+            "(Landroid/media/VolumeShaper$Configuration;)Landroid/media/VolumeShaper;");
+    jobject jVolumeShaperObj = env->CallObjectMethod(
+            mAudioTrackObj, jCreateVolumeShaper, jConfigurationObj);
+
+    jclass jVolumeShaperCls = env->FindClass("android/media/VolumeShaper");
+    jmethodID jApply = env->GetMethodID(jVolumeShaperCls, "apply",
+            "(Landroid/media/VolumeShaper$Operation;)V");
+    env->CallVoidMethod(jVolumeShaperObj, jApply, jOperationObj);
+
+    return media::VolumeShaper::Status(NO_ERROR);
+}
+
+// Forwards to Java AudioTrack.setAuxEffectSendLevel() and maps its Java status
+// code to a native status_t.
+status_t JAudioTrack::setAuxEffectSendLevel(float level) {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jSetAuxEffectSendLevel = env->GetMethodID(
+            mAudioTrackCls, "setAuxEffectSendLevel", "(F)I");
+    int result = env->CallIntMethod(mAudioTrackObj, jSetAuxEffectSendLevel, level);
+    return javaToNativeStatus(result);
+}
+
+// Forwards to Java AudioTrack.attachAuxEffect() and maps its Java status code
+// to a native status_t.
+status_t JAudioTrack::attachAuxEffect(int effectId) {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jAttachAuxEffect = env->GetMethodID(mAudioTrackCls, "attachAuxEffect", "(I)I");
+    int result = env->CallIntMethod(mAudioTrackObj, jAttachAuxEffect, effectId);
+    return javaToNativeStatus(result);
+}
+
+// Sets per-channel volume through Java AudioTrack.setStereoVolume() and maps
+// the returned Java status code to a native status_t.
+status_t JAudioTrack::setVolume(float left, float right) {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    // TODO: Java setStereoVolume is deprecated. Do we really need this method?
+    jmethodID jSetStereoVolume = env->GetMethodID(mAudioTrackCls, "setStereoVolume", "(FF)I");
+    int result = env->CallIntMethod(mAudioTrackObj, jSetStereoVolume, left, right);
+    return javaToNativeStatus(result);
+}
+
+// Sets the volume for all channels through Java AudioTrack.setVolume() and
+// maps the returned Java status code to a native status_t.
+status_t JAudioTrack::setVolume(float volume) {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jSetVolume = env->GetMethodID(mAudioTrackCls, "setVolume", "(F)I");
+    int result = env->CallIntMethod(mAudioTrackObj, jSetVolume, volume);
+    return javaToNativeStatus(result);
+}
+
+// Starts playback through Java AudioTrack.play(). Always returns NO_ERROR.
+status_t JAudioTrack::start() {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jPlay = env->GetMethodID(mAudioTrackCls, "play", "()V");
+    // TODO: Should we catch the Java IllegalStateException from play()?
+    env->CallVoidMethod(mAudioTrackObj, jPlay);
+    return NO_ERROR;
+}
+
+// Copies `size` bytes from `buffer` into a new Java byte array, wraps it in a
+// ByteBuffer, and writes it to the Java AudioTrack in blocking or non-blocking
+// mode. Returns the number of bytes written (>= 0) or a negative native status.
+// NOTE(review): this allocates a fresh Java array per call — consider reuse if
+// this path becomes hot.
+ssize_t JAudioTrack::write(const void* buffer, size_t size, bool blocking) {
+    if (buffer == NULL) {
+        return BAD_VALUE;
+    }
+
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jbyteArray jAudioData = env->NewByteArray(size);
+    env->SetByteArrayRegion(jAudioData, 0, size, (jbyte *) buffer);
+
+    jclass jByteBufferCls = env->FindClass("java/nio/ByteBuffer");
+    jmethodID jWrap = env->GetStaticMethodID(jByteBufferCls, "wrap", "([B)Ljava/nio/ByteBuffer;");
+    jobject jByteBufferObj = env->CallStaticObjectMethod(jByteBufferCls, jWrap, jAudioData);
+
+    int writeMode = 0;
+    if (blocking) {
+        jfieldID jWriteBlocking = env->GetStaticFieldID(mAudioTrackCls, "WRITE_BLOCKING", "I");
+        writeMode = env->GetStaticIntField(mAudioTrackCls, jWriteBlocking);
+    } else {
+        jfieldID jWriteNonBlocking = env->GetStaticFieldID(
+                mAudioTrackCls, "WRITE_NON_BLOCKING", "I");
+        writeMode = env->GetStaticIntField(mAudioTrackCls, jWriteNonBlocking);
+    }
+
+    jmethodID jWrite = env->GetMethodID(mAudioTrackCls, "write", "(Ljava/nio/ByteBuffer;II)I");
+    int result = env->CallIntMethod(mAudioTrackObj, jWrite, jByteBufferObj, size, writeMode);
+
+    if (result >= 0) {
+        return result;
+    } else {
+        return javaToNativeStatus(result);
+    }
+}
+
+// Stops playback through Java AudioTrack.stop().
+void JAudioTrack::stop() {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jStop = env->GetMethodID(mAudioTrackCls, "stop", "()V");
+    env->CallVoidMethod(mAudioTrackObj, jStop);
+    // TODO: Should we catch IllegalStateException?
+}
+
+// Reports "stopped" as simply the negation of isPlaying(); see the note in
+// isPlaying() about the missing STOPPING state in Java AudioTrack.
+// TODO: Is the right implementation?
+bool JAudioTrack::stopped() const {
+    return !isPlaying();
+}
+
+// Discards queued-but-unplayed data through Java AudioTrack.flush().
+void JAudioTrack::flush() {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jFlush = env->GetMethodID(mAudioTrackCls, "flush", "()V");
+    env->CallVoidMethod(mAudioTrackObj, jFlush);
+}
+
+// Pauses playback through Java AudioTrack.pause().
+void JAudioTrack::pause() {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jPause = env->GetMethodID(mAudioTrackCls, "pause", "()V");
+    env->CallVoidMethod(mAudioTrackObj, jPause);
+    // TODO: Should we catch IllegalStateException?
+}
+
+// Returns true iff the Java play state equals AudioTrack.PLAYSTATE_PLAYING.
+bool JAudioTrack::isPlaying() const {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jGetPlayState = env->GetMethodID(mAudioTrackCls, "getPlayState", "()I");
+    int currentPlayState = env->CallIntMethod(mAudioTrackObj, jGetPlayState);
+
+    // TODO: In Java AudioTrack, there is no STOPPING state.
+    // This means while stopping, isPlaying() will return different value in two class.
+    //  - in existing native AudioTrack: true
+    //  - in JAudioTrack: false
+    // If not okay, also modify the implementation of stopped().
+    jfieldID jPlayStatePlaying = env->GetStaticFieldID(mAudioTrackCls, "PLAYSTATE_PLAYING", "I");
+    int statePlaying = env->GetStaticIntField(mAudioTrackCls, jPlayStatePlaying);
+    return currentPlayState == statePlaying;
+}
+
+// Returns the playback sample rate reported by Java AudioTrack.getSampleRate().
+uint32_t JAudioTrack::getSampleRate() {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jGetSampleRate = env->GetMethodID(mAudioTrackCls, "getSampleRate", "()I");
+    return env->CallIntMethod(mAudioTrackObj, jGetSampleRate);
+}
+
+// Computes the buffer duration in microseconds from the Java buffer size in
+// frames, the current sample rate, and the current playback speed.
+// Returns BAD_VALUE if duration is null, NO_ERROR otherwise.
+status_t JAudioTrack::getBufferDurationInUs(int64_t *duration) {
+    if (duration == nullptr) {
+        return BAD_VALUE;
+    }
+
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jGetBufferSizeInFrames = env->GetMethodID(
+            mAudioTrackCls, "getBufferSizeInFrames", "()I");
+    int bufferSizeInFrames = env->CallIntMethod(mAudioTrackObj, jGetBufferSizeInFrames);
+
+    // Use a double constant so the division below is done in floating point.
+    const double secondToMicro = 1000000LL; // 1E6
+    int sampleRate = JAudioTrack::getSampleRate();
+    float speed = JAudioTrack::getPlaybackRate().mSpeed;
+
+    *duration = (int64_t) (bufferSizeInFrames * secondToMicro / (sampleRate * speed));
+    return NO_ERROR;
+}
+
+// Returns the native audio format converted from Java AudioTrack.getAudioFormat().
+audio_format_t JAudioTrack::format() {
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+    jmethodID jGetAudioFormat = env->GetMethodID(mAudioTrackCls, "getAudioFormat", "()I");
+    int javaFormat = env->CallIntMethod(mAudioTrackObj, jGetAudioFormat);
+    return audioFormatToNative(javaFormat);
+}
+
+// Converts a native VolumeShaper::Configuration into a Java
+// android.media.VolumeShaper.Configuration via its Builder.
+// Returns NULL when the configuration cannot be represented in Java.
+jobject JAudioTrack::createVolumeShaperConfigurationObj(
+        const sp<media::VolumeShaper::Configuration>& config) {
+
+    // TODO: Java VolumeShaper's setId() / setOptionFlags() are hidden.
+    if (config == NULL || config->getType() == media::VolumeShaper::Configuration::TYPE_ID) {
+        return NULL;
+    }
+
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+    // Referenced "android_media_VolumeShaper.h".
+    jfloatArray xarray = nullptr;
+    jfloatArray yarray = nullptr;
+    if (config->getType() == media::VolumeShaper::Configuration::TYPE_SCALE) {
+        // convert curve arrays
+        xarray = env->NewFloatArray(config->size());
+        yarray = env->NewFloatArray(config->size());
+        float * const x = env->GetFloatArrayElements(xarray, nullptr /* isCopy */);
+        float * const y = env->GetFloatArrayElements(yarray, nullptr /* isCopy */);
+        float *xptr = x, *yptr = y;
+        for (const auto &pt : *config.get()) {
+            *xptr++ = pt.first;
+            *yptr++ = pt.second;
+        }
+        env->ReleaseFloatArrayElements(xarray, x, 0 /* mode */);
+        env->ReleaseFloatArrayElements(yarray, y, 0 /* mode */);
+    }
+
+    jclass jBuilderCls = env->FindClass("android/media/VolumeShaper$Configuration$Builder");
+    jmethodID jBuilderCtor = env->GetMethodID(jBuilderCls, "<init>", "()V");
+    jobject jBuilderObj = env->NewObject(jBuilderCls, jBuilderCtor);
+
+    // setDuration takes a Java long: the JNI descriptor is "(J)", not "(L)".
+    jmethodID jSetDuration = env->GetMethodID(jBuilderCls, "setDuration",
+            "(J)Landroid/media/VolumeShaper$Configuration$Builder;");
+    // Builder methods are instance methods: invoke them on jBuilderObj, not the class.
+    jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetDuration, (jlong) config->getDurationMs());
+
+    jmethodID jSetInterpolatorType = env->GetMethodID(jBuilderCls, "setInterpolatorType",
+            "(I)Landroid/media/VolumeShaper$Configuration$Builder;");
+    jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetInterpolatorType,
+            config->getInterpolatorType());
+
+    jmethodID jSetCurve = env->GetMethodID(jBuilderCls, "setCurve",
+            "([F[F)Landroid/media/VolumeShaper$Configuration$Builder;");
+    jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetCurve, xarray, yarray);
+
+    jmethodID jBuild = env->GetMethodID(jBuilderCls, "build",
+            "()Landroid/media/VolumeShaper$Configuration;");
+    return env->CallObjectMethod(jBuilderObj, jBuild);
+}
+
+// Converts a native VolumeShaper::Operation into a Java
+// android.media.VolumeShaper.Operation via its Builder, translating each
+// operation flag into the corresponding Builder call.
+jobject JAudioTrack::createVolumeShaperOperationObj(
+        const sp<media::VolumeShaper::Operation>& operation) {
+
+    JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+    jclass jBuilderCls = env->FindClass("android/media/VolumeShaper$Operation$Builder");
+    jmethodID jBuilderCtor = env->GetMethodID(jBuilderCls, "<init>", "()V");
+    jobject jBuilderObj = env->NewObject(jBuilderCls, jBuilderCtor);
+
+    // Set XOffset. Builder methods are instance methods: invoke them on
+    // jBuilderObj, not on the class object.
+    jmethodID jSetXOffset = env->GetMethodID(jBuilderCls, "setXOffset",
+            "(F)Landroid/media/VolumeShaper$Operation$Builder;");
+    jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetXOffset, operation->getXOffset());
+
+    int32_t flags = operation->getFlags();
+
+    if (operation->getReplaceId() >= 0) {
+        // A Java boolean parameter is "Z" in a JNI descriptor ("B" is byte).
+        jmethodID jReplace = env->GetMethodID(jBuilderCls, "replace",
+                "(IZ)Landroid/media/VolumeShaper$Operation$Builder;");
+        // Test flag bits with '&'; '|' would make the expression always true.
+        bool join = (flags & media::VolumeShaper::Operation::FLAG_JOIN) != 0;
+        jBuilderObj = env->CallObjectMethod(jBuilderObj, jReplace, operation->getReplaceId(), join);
+    }
+
+    if (flags & media::VolumeShaper::Operation::FLAG_REVERSE) {
+        jmethodID jReverse = env->GetMethodID(jBuilderCls, "reverse",
+                "()Landroid/media/VolumeShaper$Operation$Builder;");
+        jBuilderObj = env->CallObjectMethod(jBuilderObj, jReverse);
+    }
+
+    // TODO: VolumeShaper Javadoc says "Do not call terminate() directly". Can we call this?
+    if (flags & media::VolumeShaper::Operation::FLAG_TERMINATE) {
+        jmethodID jTerminate = env->GetMethodID(jBuilderCls, "terminate",
+                "()Landroid/media/VolumeShaper$Operation$Builder;");
+        jBuilderObj = env->CallObjectMethod(jBuilderObj, jTerminate);
+    }
+
+    if (flags & media::VolumeShaper::Operation::FLAG_DELAY) {
+        jmethodID jDefer = env->GetMethodID(jBuilderCls, "defer",
+                "()Landroid/media/VolumeShaper$Operation$Builder;");
+        jBuilderObj = env->CallObjectMethod(jBuilderObj, jDefer);
+    }
+
+    if (flags & media::VolumeShaper::Operation::FLAG_CREATE_IF_NECESSARY) {
+        jmethodID jCreateIfNeeded = env->GetMethodID(jBuilderCls, "createIfNeeded",
+                "()Landroid/media/VolumeShaper$Operation$Builder;");
+        jBuilderObj = env->CallObjectMethod(jBuilderObj, jCreateIfNeeded);
+    }
+
+    // TODO: Handle error case (can it be NULL?)
+    jmethodID jBuild = env->GetMethodID(jBuilderCls, "build",
+            "()Landroid/media/VolumeShaper$Operation;");
+    return env->CallObjectMethod(jBuilderObj, jBuild);
+}
+
+// Maps an AudioSystem status code returned by Java AudioTrack methods to the
+// corresponding native status_t; unrecognized codes become UNKNOWN_ERROR.
+status_t JAudioTrack::javaToNativeStatus(int javaStatus) {
+    switch (javaStatus) {
+    case AUDIO_JAVA_SUCCESS:
+        return NO_ERROR;
+    case AUDIO_JAVA_BAD_VALUE:
+        return BAD_VALUE;
+    case AUDIO_JAVA_INVALID_OPERATION:
+        return INVALID_OPERATION;
+    case AUDIO_JAVA_PERMISSION_DENIED:
+        return PERMISSION_DENIED;
+    case AUDIO_JAVA_NO_INIT:
+        return NO_INIT;
+    case AUDIO_JAVA_WOULD_BLOCK:
+        return WOULD_BLOCK;
+    case AUDIO_JAVA_DEAD_OBJECT:
+        return DEAD_OBJECT;
+    default:
+        return UNKNOWN_ERROR;
+    }
+}
+
+} // namespace android
diff --git a/media/libmedia/include/media/JAudioAttributes.h b/media/libmedia/include/media/JAudioAttributes.h
new file mode 100644
index 0000000..fb11435
--- /dev/null
+++ b/media/libmedia/include/media/JAudioAttributes.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_JAUDIOATTRIBUTES_H
+#define ANDROID_JAUDIOATTRIBUTES_H
+
+#include <jni.h>
+#include <system/audio.h>
+
+namespace android {
+
+/* Header-only helper that builds a Java android.media.AudioAttributes object
+ * from native audio attributes (or, when they are absent, a legacy stream type). */
+class JAudioAttributes {
+public:
+    /* Creates a Java AudioAttributes object.
+     * If pAttributes is non-null, its usage/content_type/flags are applied and
+     * streamType is ignored; otherwise streamType is set as the legacy stream type. */
+    static jobject createAudioAttributesObj(JNIEnv *env,
+                                            const audio_attributes_t* pAttributes,
+                                            audio_stream_type_t streamType) {
+
+        jclass jBuilderCls = env->FindClass("android/media/AudioAttributes$Builder");
+        jmethodID jBuilderCtor = env->GetMethodID(jBuilderCls, "<init>", "()V");
+        jobject jBuilderObj = env->NewObject(jBuilderCls, jBuilderCtor);
+
+        if (pAttributes != NULL) {
+            // If pAttributes is not null, streamType is ignored.
+            jmethodID jSetUsage = env->GetMethodID(
+                    jBuilderCls, "setUsage", "(I)Landroid/media/AudioAttributes$Builder;");
+            jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetUsage, pAttributes->usage);
+
+            jmethodID jSetContentType = env->GetMethodID(jBuilderCls, "setContentType",
+                    "(I)Landroid/media/AudioAttributes$Builder;");
+            jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetContentType,
+                    pAttributes->content_type);
+
+            // TODO: Java AudioAttributes.Builder.setCapturePreset() is systemApi and hidden.
+            // Can we use this method?
+//            jmethodID jSetCapturePreset = env->GetMethodID(jBuilderCls, "setCapturePreset",
+//                    "(I)Landroid/media/AudioAttributes$Builder;");
+//            jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetCapturePreset,
+//                    pAttributes->source);
+
+            jmethodID jSetFlags = env->GetMethodID(jBuilderCls, "setFlags",
+                    "(I)Landroid/media/AudioAttributes$Builder;");
+            jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetFlags, pAttributes->flags);
+
+            // TODO: Handle the 'tags' (char[] to HashSet<String>).
+            // How to parse the char[]? Is there any example of it?
+            // Also, the addTags() method is hidden.
+        } else {
+            // Call AudioAttributes.Builder.setLegacyStreamType().build()
+            jmethodID jSetLegacyStreamType = env->GetMethodID(jBuilderCls, "setLegacyStreamType",
+                    "(I)Landroid/media/AudioAttributes$Builder;");
+            jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetLegacyStreamType, streamType);
+        }
+
+        jmethodID jBuild = env->GetMethodID(jBuilderCls, "build",
+                "()Landroid/media/AudioAttributes;");
+        return env->CallObjectMethod(jBuilderObj, jBuild);
+    }
+
+};
+
+} // namespace android
+
+#endif // ANDROID_JAUDIOATTRIBUTES_H
diff --git a/media/libmedia/include/media/JAudioFormat.h b/media/libmedia/include/media/JAudioFormat.h
new file mode 100644
index 0000000..00abdff
--- /dev/null
+++ b/media/libmedia/include/media/JAudioFormat.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_JAUDIOFORMAT_H
+#define ANDROID_JAUDIOFORMAT_H
+
+#include <android_media_AudioFormat.h>
+#include <jni.h>
+
+namespace android {
+
+/* Header-only helper that builds a Java android.media.AudioFormat object from
+ * native sample rate, format, and channel mask. */
+class JAudioFormat {
+public:
+    /* Creates a Java AudioFormat object.
+     * A sampleRate of 0 is mapped to AudioFormat.SAMPLE_RATE_UNSPECIFIED. */
+    static jobject createAudioFormatObj(JNIEnv *env,
+                                        uint32_t sampleRate,
+                                        audio_format_t format,
+                                        audio_channel_mask_t channelMask) {
+
+        jclass jBuilderCls = env->FindClass("android/media/AudioFormat$Builder");
+        jmethodID jBuilderCtor = env->GetMethodID(jBuilderCls, "<init>", "()V");
+        jobject jBuilderObj = env->NewObject(jBuilderCls, jBuilderCtor);
+
+        if (sampleRate == 0) {
+            jclass jAudioFormatCls = env->FindClass("android/media/AudioFormat");
+            jfieldID jSampleRateUnspecified =
+                    env->GetStaticFieldID(jAudioFormatCls, "SAMPLE_RATE_UNSPECIFIED", "I");
+            sampleRate = env->GetStaticIntField(jAudioFormatCls, jSampleRateUnspecified);
+        }
+
+        jmethodID jSetEncoding = env->GetMethodID(jBuilderCls, "setEncoding",
+                "(I)Landroid/media/AudioFormat$Builder;");
+        jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetEncoding,
+                audioFormatFromNative(format));
+
+        jmethodID jSetSampleRate = env->GetMethodID(jBuilderCls, "setSampleRate",
+                "(I)Landroid/media/AudioFormat$Builder;");
+        jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetSampleRate, sampleRate);
+
+        jmethodID jSetChannelMask = env->GetMethodID(jBuilderCls, "setChannelMask",
+                "(I)Landroid/media/AudioFormat$Builder;");
+        jBuilderObj = env->CallObjectMethod(jBuilderObj, jSetChannelMask,
+                outChannelMaskFromNative(channelMask));
+
+        jmethodID jBuild = env->GetMethodID(jBuilderCls, "build", "()Landroid/media/AudioFormat;");
+        return env->CallObjectMethod(jBuilderObj, jBuild);
+    }
+
+};
+
+} // namespace android
+
+#endif // ANDROID_JAUDIOFORMAT_H
diff --git a/media/libmedia/include/media/JAudioTrack.h b/media/libmedia/include/media/JAudioTrack.h
new file mode 100644
index 0000000..8af30b7
--- /dev/null
+++ b/media/libmedia/include/media/JAudioTrack.h
@@ -0,0 +1,273 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_JAUDIOTRACK_H
+#define ANDROID_JAUDIOTRACK_H
+
+#include <jni.h>
+#include <media/AudioResamplerPublic.h>
+#include <media/VolumeShaper.h>
+#include <system/audio.h>
+#include <utils/Errors.h>
+
+#include <media/AudioTimestamp.h> // It has dependency on audio.h/Errors.h, but doesn't
+ // include them in it. Therefore it is included here at last.
+
+namespace android {
+
/* JAudioTrack wraps a Java android.media.AudioTrack object (held in
 * mAudioTrackObj) and exposes a subset of the native AudioTrack API on top of
 * it via JNI. Only the interface is declared here; all behavior lives in
 * JAudioTrack.cpp. NOTE(review): presumably used by the MediaPlayer2 stack,
 * which is built into the same library — confirm against the callers.
 */
class JAudioTrack {
public:

    /* Creates an JAudioTrack object for non-offload mode.
     * Once created, the track needs to be started before it can be used.
     * Unspecified values are set to appropriate default values.
     *
     * Parameters:
     *
     * streamType:  Select the type of audio stream this track is attached to
     *              (e.g. AUDIO_STREAM_MUSIC).
     * sampleRate:  Data source sampling rate in Hz. Zero means to use the sink sample rate.
     *              A non-zero value must be specified if AUDIO_OUTPUT_FLAG_DIRECT is set.
     *              0 will not work with current policy implementation for direct output
     *              selection where an exact match is needed for sampling rate.
     *              (TODO: Check direct output after flags can be used in Java AudioTrack.)
     * format:      Audio format. For mixed tracks, any PCM format supported by server is OK.
     *              For direct and offloaded tracks, the possible format(s) depends on the
     *              output sink.
     *              (TODO: How can we check whether a format is supported?)
     * channelMask: Channel mask, such that audio_is_output_channel(channelMask) is true.
     * frameCount:  Minimum size of track PCM buffer in frames. This defines the
     *              application's contribution to the latency of the track.
     *              The actual size selected by the JAudioTrack could be larger if the
     *              requested size is not compatible with current audio HAL configuration.
     *              Zero means to use a default value.
     * sessionId:   Specific session ID, or zero to use default.
     * pAttributes: If not NULL, supersedes streamType for use case selection.
     * maxRequiredSpeed: For PCM tracks, this creates an appropriate buffer size that will allow
     *              maxRequiredSpeed playback. Values less than 1.0f and greater than
     *              AUDIO_TIMESTRETCH_SPEED_MAX will be clamped. For non-PCM tracks
     *              and direct or offloaded tracks, this parameter is ignored.
     *              (TODO: Handle this after offload / direct track is supported.)
     *
     * TODO: Revive removed arguments after offload mode is supported.
     */
    JAudioTrack(audio_stream_type_t streamType,
                uint32_t sampleRate,
                audio_format_t format,
                audio_channel_mask_t channelMask,
                size_t frameCount = 0,
                audio_session_t sessionId = AUDIO_SESSION_ALLOCATE,
                const audio_attributes_t* pAttributes = NULL,
                float maxRequiredSpeed = 1.0f);

    /*
       Temporarily removed constructor arguments:

       // Q. Values are in audio-base.h, but where can we find explanation for them?
       audio_output_flags_t flags,

       // Q. May be used in AudioTrack.setPreferredDevice(AudioDeviceInfo)?
       audio_port_handle_t selectedDeviceId,

       // Should be deleted, since we don't use Binder anymore.
       bool doNotReconnect,

       // Do we need UID and PID?
       uid_t uid,
       pid_t pid,

       // TODO: Uses these values when Java AudioTrack supports the offload mode.
       callback_t cbf,
       void* user,
       int32_t notificationFrames,
       const audio_offload_info_t *offloadInfo,

       // Fixed to false, but what is this?
       threadCanCallJava
    */

    virtual ~JAudioTrack();

    /* Returns the frame count of the track's PCM buffer.
     * NOTE(review): presumably the actual size selected by the underlying Java
     * AudioTrack (see the frameCount constructor parameter) — confirm in
     * JAudioTrack.cpp. */
    size_t frameCount();

    /* Returns the number of output channels of the track. */
    size_t channelCount();

    /* Return the total number of frames played since playback start.
     * The counter will wrap (overflow) periodically, e.g. every ~27 hours at 44.1 kHz.
     * It is reset to zero by flush(), reload(), and stop().
     *
     * Parameters:
     *
     * position: Address where to return play head position.
     *
     * Returned status (from utils/Errors.h) can be:
     *  - NO_ERROR: successful operation
     *  - BAD_VALUE: position is NULL
     */
    status_t getPosition(uint32_t *position);

    // TODO: Does this comment apply same to Java AudioTrack::getTimestamp?
    // Changed the return type from status_t to bool, since Java AudioTrack::getTimestamp returns
    // boolean. Will Java getTimestampWithStatus() be public?
    /* Poll for a timestamp on demand.
     * Use if EVENT_NEW_TIMESTAMP is not delivered often enough for your needs,
     * or if you need to get the most recent timestamp outside of the event callback handler.
     * Caution: calling this method too often may be inefficient;
     * if you need a high resolution mapping between frame position and presentation time,
     * consider implementing that at application level, based on the low resolution timestamps.
     * Returns true if timestamp is valid.
     * The timestamp parameter is undefined on return, if false is returned.
     */
    bool getTimeStamp(AudioTimestamp& timestamp);

    /* Set source playback rate for timestretch
     * 1.0 is normal speed: < 1.0 is slower, > 1.0 is faster
     * 1.0 is normal pitch: < 1.0 is lower pitch, > 1.0 is higher pitch
     *
     * AUDIO_TIMESTRETCH_SPEED_MIN <= speed <= AUDIO_TIMESTRETCH_SPEED_MAX
     * AUDIO_TIMESTRETCH_PITCH_MIN <= pitch <= AUDIO_TIMESTRETCH_PITCH_MAX
     *
     * Speed increases the playback rate of media, but does not alter pitch.
     * Pitch increases the "tonal frequency" of media, but does not affect the playback rate.
     */
    status_t setPlaybackRate(const AudioPlaybackRate &playbackRate);

    /* Return current playback rate */
    const AudioPlaybackRate getPlaybackRate();

    /* Sets the volume shaper object */
    media::VolumeShaper::Status applyVolumeShaper(
            const sp<media::VolumeShaper::Configuration>& configuration,
            const sp<media::VolumeShaper::Operation>& operation);

    /* Set the send level for this track. An auxiliary effect should be attached
     * to the track with attachEffect(). Level must be >= 0.0 and <= 1.0.
     */
    status_t setAuxEffectSendLevel(float level);

    /* Attach track auxiliary output to specified effect. Use effectId = 0
     * to detach track from effect.
     *
     * Parameters:
     *
     * effectId: effectId obtained from AudioEffect::id().
     *
     * Returned status (from utils/Errors.h) can be:
     *  - NO_ERROR: successful operation
     *  - INVALID_OPERATION: The effect is not an auxiliary effect.
     *  - BAD_VALUE: The specified effect ID is invalid.
     */
    status_t attachAuxEffect(int effectId);

    /* Set volume for this track, mostly used for games' sound effects
     * left and right volumes. Levels must be >= 0.0 and <= 1.0.
     * This is the older API. New applications should use setVolume(float) when possible.
     */
    status_t setVolume(float left, float right);

    /* Set volume for all channels. This is the preferred API for new applications,
     * especially for multi-channel content.
     */
    status_t setVolume(float volume);

    // TODO: Does this comment equally apply to the Java AudioTrack::play()?
    /* After it's created the track is not active. Call start() to
     * make it active. If set, the callback will start being called.
     * If the track was previously paused, volume is ramped up over the first mix buffer.
     */
    status_t start();

    // TODO: Does this comment still applies? It seems not. (obtainBuffer, AudioFlinger, ...)
    /* As a convenience we provide a write() interface to the audio buffer.
     * Input parameter 'size' is in byte units.
     * This is implemented on top of obtainBuffer/releaseBuffer. For best
     * performance use callbacks. Returns actual number of bytes written >= 0,
     * or one of the following negative status codes:
     *      INVALID_OPERATION   AudioTrack is configured for static buffer or streaming mode
     *      BAD_VALUE           size is invalid
     *      WOULD_BLOCK         when obtainBuffer() returns same, or
     *                          AudioTrack was stopped during the write
     *      DEAD_OBJECT         when AudioFlinger dies or the output device changes and
     *                          the track cannot be automatically restored.
     *                          The application needs to recreate the AudioTrack
     *                          because the audio device changed or AudioFlinger died.
     *                          This typically occurs for direct or offload tracks
     *                          or if mDoNotReconnect is true.
     *      or any other error code returned by IAudioTrack::start() or restoreTrack_l().
     * Default behavior is to only return when all data has been transferred. Set 'blocking' to
     * false for the method to return immediately without waiting to try multiple times to write
     * the full content of the buffer.
     */
    ssize_t write(const void* buffer, size_t size, bool blocking = true);

    // TODO: Does this comment equally apply to the Java AudioTrack::stop()?
    /* Stop a track.
     * In static buffer mode, the track is stopped immediately.
     * In streaming mode, the callback will cease being called. Note that obtainBuffer() still
     * works and will fill up buffers until the pool is exhausted, and then will return WOULD_BLOCK.
     * In streaming mode the stop does not occur immediately: any data remaining in the buffer
     * is first drained, mixed, and output, and only then is the track marked as stopped.
     */
    void stop();

    /* Returns true if the track has been stopped (see stop()). */
    bool stopped() const;

    // TODO: Does this comment equally apply to the Java AudioTrack::flush()?
    /* Flush a stopped or paused track. All previously buffered data is discarded immediately.
     * This has the effect of draining the buffers without mixing or output.
     * Flush is intended for streaming mode, for example before switching to non-contiguous content.
     * This function is a no-op if the track is not stopped or paused, or uses a static buffer.
     */
    void flush();

    // TODO: Does this comment equally apply to the Java AudioTrack::pause()?
    // At least we are not using obtainBuffer.
    /* Pause a track. After pause, the callback will cease being called and
     * obtainBuffer returns WOULD_BLOCK. Note that obtainBuffer() still works
     * and will fill up buffers until the pool is exhausted.
     * Volume is ramped down over the next mix buffer following the pause request,
     * and then the track is marked as paused. It can be resumed with ramp up by start().
     */
    void pause();

    /* Returns true if the track is currently playing (started and not yet
     * stopped or paused). */
    bool isPlaying() const;

    /* Return current source sample rate in Hz.
     * If specified as zero in constructor, this will be the sink sample rate.
     */
    uint32_t getSampleRate();

    /* Returns the buffer duration in microseconds at current playback rate. */
    status_t getBufferDurationInUs(int64_t *duration);

    /* Returns the track's audio format as the native enum.
     * NOTE(review): presumably converted back from the Java AudioFormat
     * encoding — confirm in JAudioTrack.cpp. */
    audio_format_t format();

private:
    // JNI references to the Java android.media.AudioTrack class and the
    // wrapped instance that all public methods delegate to.
    // NOTE(review): whether these are global or local references (and thus
    // their lifetime) is established in JAudioTrack.cpp — confirm there.
    jclass mAudioTrackCls;
    jobject mAudioTrackObj;

    /* Creates a Java VolumeShaper.Configuration object from VolumeShaper::Configuration */
    jobject createVolumeShaperConfigurationObj(
            const sp<media::VolumeShaper::Configuration>& config);

    /* Creates a Java VolumeShaper.Operation object from VolumeShaper::Operation */
    jobject createVolumeShaperOperationObj(
            const sp<media::VolumeShaper::Operation>& operation);

    /* Maps a Java AudioTrack status/error code to a native status_t. */
    status_t javaToNativeStatus(int javaStatus);
};
+
+}; // namespace android
+
+#endif // ANDROID_JAUDIOTRACK_H