Merge "Remove libmediametrics dependency from libmediaextractor"
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index 61deb46..c0eb5c1 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -251,6 +251,36 @@
* @see ACameraDevice_createCaptureRequest
*/
TEMPLATE_MANUAL = 6,
+
+ /**
+ * A template for selecting camera parameters that match TEMPLATE_PREVIEW as closely as
+ * possible while improving the camera output for motion tracking use cases.
+ *
+ * <p>This template is best used by applications that are frequently switching between motion
+ * tracking use cases and regular still capture use cases, to minimize the IQ changes
+ * when swapping use cases.</p>
+ *
+ * <p>This template is guaranteed to be supported on camera devices that support the
+ * {@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING} capability.</p>
+ *
+ * @see ACameraDevice_createCaptureRequest
+ */
+ TEMPLATE_MOTION_TRACKING_PREVIEW = 7,
+
+ /**
+ * A template for selecting camera parameters that maximize the quality of camera output for
+ * motion tracking use cases.
+ *
+ * <p>This template is best used by applications dedicated to motion tracking use cases,
+ * which aren't concerned about fast switches between motion tracking and other use cases.</p>
+ *
+ * <p>This template is guaranteed to be supported on camera devices that support the
+ * {@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING} capability.</p>
+ *
+ * @see ACameraDevice_createCaptureRequest
+ */
+ TEMPLATE_MOTION_TRACKING_BEST = 8,
+
} ACameraDevice_request_template;
/**
@@ -760,4 +790,3 @@
#endif /* _NDK_CAMERA_DEVICE_H */
/** @} */
-
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 80d460f..2c144b7 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -837,10 +837,13 @@
*
* <p>This control (except for MANUAL) is only effective if
* <code>ACAMERA_CONTROL_MODE != OFF</code> and any 3A routine is active.</p>
- * <p>ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
- * contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
- * ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains MANUAL_SENSOR. Other intent values are
- * always supported.</p>
+ * <p>All intents are supported by all devices, except that:
+ * * ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * PRIVATE_REPROCESSING or YUV_REPROCESSING.
+ * * MANUAL will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * MANUAL_SENSOR.
+ * * MOTION_TRACKING will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * MOTION_TRACKING.</p>
*
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
@@ -2235,34 +2238,31 @@
* </ul></p>
*
* <p>The position of the camera device's lens optical center,
- * as a three-dimensional vector <code>(x,y,z)</code>, relative to the
- * optical center of the largest camera device facing in the
- * same direction as this camera, in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor coordinate
- * axes</a>. Note that only the axis definitions are shared with
- * the sensor coordinate system, but not the origin.</p>
- * <p>If this device is the largest or only camera device with a
- * given facing, then this position will be <code>(0, 0, 0)</code>; a
- * camera device with a lens optical center located 3 cm from
- * the main sensor along the +X axis (to the right from the
- * user's perspective) will report <code>(0.03, 0, 0)</code>.</p>
- * <p>To transform a pixel coordinates between two cameras
- * facing the same direction, first the source camera
- * ACAMERA_LENS_RADIAL_DISTORTION must be corrected for. Then
- * the source camera ACAMERA_LENS_INTRINSIC_CALIBRATION needs
- * to be applied, followed by the ACAMERA_LENS_POSE_ROTATION
- * of the source camera, the translation of the source camera
- * relative to the destination camera, the
- * ACAMERA_LENS_POSE_ROTATION of the destination camera, and
- * finally the inverse of ACAMERA_LENS_INTRINSIC_CALIBRATION
- * of the destination camera. This obtains a
- * radial-distortion-free coordinate in the destination
- * camera pixel coordinates.</p>
- * <p>To compare this against a real image from the destination
- * camera, the destination camera image then needs to be
- * corrected for radial distortion before comparison or
- * sampling.</p>
+ * as a three-dimensional vector <code>(x,y,z)</code>.</p>
+ * <p>Prior to Android P, or when ACAMERA_LENS_POSE_REFERENCE is PRIMARY_CAMERA, this position
+ * is relative to the optical center of the largest camera device facing in the same
+ * direction as this camera, in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor
+ * coordinate axes</a>. Note that only the axis definitions are shared with the sensor
+ * coordinate system, but not the origin.</p>
+ * <p>If this device is the largest or only camera device with a given facing, then this
+ * position will be <code>(0, 0, 0)</code>; a camera device with a lens optical center located 3 cm
+ * from the main sensor along the +X axis (to the right from the user's perspective) will
+ * report <code>(0.03, 0, 0)</code>.</p>
+ * <p>To transform pixel coordinates between two cameras facing the same direction, first
+ * the source camera ACAMERA_LENS_RADIAL_DISTORTION must be corrected for. Then the source
+ * camera ACAMERA_LENS_INTRINSIC_CALIBRATION needs to be applied, followed by the
+ * ACAMERA_LENS_POSE_ROTATION of the source camera, the translation of the source camera
+ * relative to the destination camera, the ACAMERA_LENS_POSE_ROTATION of the destination
+ * camera, and finally the inverse of ACAMERA_LENS_INTRINSIC_CALIBRATION of the destination
+ * camera. This obtains a radial-distortion-free coordinate in the destination camera pixel
+ * coordinates.</p>
+ * <p>To compare this against a real image from the destination camera, the destination camera
+ * image then needs to be corrected for radial distortion before comparison or sampling.</p>
+ * <p>When ACAMERA_LENS_POSE_REFERENCE is GYROSCOPE, then this position is relative to
+ * the center of the primary gyroscope on the device.</p>
*
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * @see ACAMERA_LENS_POSE_REFERENCE
* @see ACAMERA_LENS_POSE_ROTATION
* @see ACAMERA_LENS_RADIAL_DISTORTION
*/
@@ -2433,6 +2433,26 @@
*/
ACAMERA_LENS_RADIAL_DISTORTION = // float[6]
ACAMERA_LENS_START + 11,
+ /**
+ * <p>The origin for ACAMERA_LENS_POSE_TRANSLATION.</p>
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ *
+ * <p>Type: byte (acamera_metadata_enum_android_lens_pose_reference_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>Different calibration methods and use cases can produce better or worse results
+ * depending on the selected coordinate origin.</p>
+ * <p>For devices designed to support the MOTION_TRACKING capability, the GYROSCOPE origin
+ * makes device calibration and later usage by applications combining camera and gyroscope
+ * information together simpler.</p>
+ */
+ ACAMERA_LENS_POSE_REFERENCE = // byte (acamera_metadata_enum_android_lens_pose_reference_t)
+ ACAMERA_LENS_START + 12,
ACAMERA_LENS_END,
/**
@@ -2895,7 +2915,7 @@
* time-consuming hardware re-configuration or internal camera pipeline
* change. For performance reasons we advise clients to pass their initial
* values as part of
- * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.i
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
* Once the camera capture session is enabled it is also recommended to avoid
* changing them from their initial values set in
* {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
@@ -4908,6 +4928,23 @@
*/
ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL = // byte (acamera_metadata_enum_android_info_supported_hardware_level_t)
ACAMERA_INFO_START,
+ /**
+ * <p>A short string for manufacturer version information about the camera device, such as
+ * ISP hardware, sensors, etc.</p>
+ *
+ * <p>Type: byte</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This can be used in <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_IMAGE_DESCRIPTION">TAG_IMAGE_DESCRIPTION</a>
+ * in jpeg EXIF. This key may be absent if no version information is available on the
+ * device.</p>
+ */
+ ACAMERA_INFO_VERSION = // byte
+ ACAMERA_INFO_START + 1,
ACAMERA_INFO_END,
/**
@@ -5700,6 +5737,15 @@
*/
ACAMERA_CONTROL_CAPTURE_INTENT_MANUAL = 6,
+ /**
+ * <p>This request is for a motion tracking use case, where
+ * the application will use camera and inertial sensor data to
+ * locate and track objects in the world.</p>
+ * <p>The camera device auto-exposure routine will limit the exposure time
+ * of the camera to no more than 20 milliseconds, to minimize motion blur.</p>
+ */
+ ACAMERA_CONTROL_CAPTURE_INTENT_MOTION_TRACKING = 7,
+
} acamera_metadata_enum_android_control_capture_intent_t;
// ACAMERA_CONTROL_EFFECT_MODE
@@ -6411,6 +6457,28 @@
} acamera_metadata_enum_android_lens_state_t;
+// ACAMERA_LENS_POSE_REFERENCE
+typedef enum acamera_metadata_enum_acamera_lens_pose_reference {
+ /**
+ * <p>The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the optical center of
+ * the largest camera device facing the same direction as this camera.</p>
+ * <p>This is the default value for API levels before Android P.</p>
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_LENS_POSE_REFERENCE_PRIMARY_CAMERA = 0,
+
+ /**
+ * <p>The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the position of the
+ * primary gyroscope of this Android device.</p>
+ * <p>This is the value reported by all devices that support the MOTION_TRACKING capability.</p>
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_LENS_POSE_REFERENCE_GYROSCOPE = 1,
+
+} acamera_metadata_enum_android_lens_pose_reference_t;
+
// ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
typedef enum acamera_metadata_enum_acamera_lens_info_focus_distance_calibration {
@@ -6743,6 +6811,7 @@
* </ul>
* </li>
* <li>The ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE entry is listed by this device.</li>
+ * <li>As of Android P, the ACAMERA_LENS_POSE_REFERENCE entry is listed by this device.</li>
* <li>A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
* normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
* format.</li>
@@ -6758,12 +6827,57 @@
* @see ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE
* @see ACAMERA_LENS_FACING
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * @see ACAMERA_LENS_POSE_REFERENCE
* @see ACAMERA_LENS_POSE_ROTATION
* @see ACAMERA_LENS_POSE_TRANSLATION
* @see ACAMERA_LENS_RADIAL_DISTORTION
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT = 8,
+ /**
+ * <p>The device supports controls and metadata required for accurate motion tracking for
+ * use cases such as augmented reality, electronic image stabilization, and so on.</p>
+ * <p>This means this camera device has accurate optical calibration and timestamps relative
+ * to the inertial sensors.</p>
+ * <p>This capability requires the camera device to support the following:</p>
+ * <ul>
+ * <li>Capture request templates <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#TEMPLATE_MOTION_TRACKING_PREVIEW">CameraDevice#TEMPLATE_MOTION_TRACKING_PREVIEW</a> and <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#TEMPLATE_MOTION_TRACKING_BEST">CameraDevice#TEMPLATE_MOTION_TRACKING_BEST</a> are defined.</li>
+ * <li>The stream configurations listed in <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for MOTION_TRACKING are
+ * supported, either at 30 or 60fps maximum frame rate.</li>
+ * <li>The following camera characteristics and capture result metadata are provided:<ul>
+ * <li>ACAMERA_LENS_INTRINSIC_CALIBRATION</li>
+ * <li>ACAMERA_LENS_RADIAL_DISTORTION</li>
+ * <li>ACAMERA_LENS_POSE_ROTATION</li>
+ * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
+ * <li>ACAMERA_LENS_POSE_REFERENCE with value GYROSCOPE</li>
+ * </ul>
+ * </li>
+ * <li>The ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE field has value <code>REALTIME</code>. When compared to
+ * timestamps from the device's gyroscopes, the clock difference for events occurring at
+ * the same actual time instant will be less than 1 ms.</li>
+ * <li>The value of the ACAMERA_SENSOR_ROLLING_SHUTTER_SKEW field is accurate to within 1 ms.</li>
+ * <li>The value of ACAMERA_SENSOR_EXPOSURE_TIME is guaranteed to be available in the
+ * capture result.</li>
+ * <li>The ACAMERA_CONTROL_CAPTURE_INTENT control supports MOTION_TRACKING to limit maximum
+ * exposure to 20 milliseconds.</li>
+ * <li>The stream configurations required for MOTION_TRACKING (listed at <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) can operate at least at
+ * 30fps; optionally, they can operate at 60fps, and '[60, 60]' is listed in
+ * ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES.</li>
+ * </ul>
+ *
+ * @see ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES
+ * @see ACAMERA_CONTROL_CAPTURE_INTENT
+ * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * @see ACAMERA_LENS_POSE_REFERENCE
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ * @see ACAMERA_LENS_RADIAL_DISTORTION
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
+ * @see ACAMERA_SENSOR_ROLLING_SHUTTER_SKEW
+ */
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING = 10,
+
} acamera_metadata_enum_android_request_available_capabilities_t;
diff --git a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
index 7c43994..944002d 100644
--- a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
@@ -142,25 +142,24 @@
ssize_t index = mByteArrayProperties.indexOfKey(name);
if (index < 0) {
ALOGE("App requested unknown property: %s", name.string());
- return android::BAD_VALUE;
+ return android::ERROR_DRM_CANNOT_HANDLE;
}
value = mByteArrayProperties.valueAt(index);
return android::OK;
}
status_t DrmPlugin::setPropertyByteArray(
- const String8& name, const Vector<uint8_t>& value) {
+ const String8& name, const Vector<uint8_t>& value)
+{
+ UNUSED(value);
if (0 == name.compare(kDeviceIdKey)) {
ALOGD("Cannot set immutable property: %s", name.string());
- return android::BAD_VALUE;
+ return android::ERROR_DRM_CANNOT_HANDLE;
}
- ssize_t status = mByteArrayProperties.replaceValueFor(name, value);
- if (status >= 0) {
- return android::OK;
- }
+ // Setting of undefined properties is not supported
ALOGE("Failed to set property byte array, key=%s", name.string());
- return android::BAD_VALUE;
+ return android::ERROR_DRM_CANNOT_HANDLE;
}
status_t DrmPlugin::getPropertyString(
@@ -168,7 +167,7 @@
ssize_t index = mStringProperties.indexOfKey(name);
if (index < 0) {
ALOGE("App requested unknown property: %s", name.string());
- return android::BAD_VALUE;
+ return android::ERROR_DRM_CANNOT_HANDLE;
}
value = mStringProperties.valueAt(index);
return android::OK;
@@ -182,12 +181,18 @@
kVendorKey.string(), kVersionKey.string());
if (immutableKeys.contains(name.string())) {
ALOGD("Cannot set immutable property: %s", name.string());
- return android::BAD_VALUE;
+ return android::ERROR_DRM_CANNOT_HANDLE;
+ }
+
+ ssize_t index = mStringProperties.indexOfKey(name);
+ if (index < 0) {
+ ALOGE("Cannot set undefined property string, key=%s", name.string());
+ return android::ERROR_DRM_CANNOT_HANDLE;
}
if (mStringProperties.add(name, value) < 0) {
ALOGE("Failed to set property string, key=%s", name.string());
- return android::BAD_VALUE;
+ return android::ERROR_DRM_UNKNOWN;
}
return android::OK;
}
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
index fa8fdc9..5b7d956 100644
--- a/media/libaaudio/examples/loopback/Android.bp
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -3,6 +3,10 @@
gtest: false,
srcs: ["src/loopback.cpp"],
cflags: ["-Wall", "-Werror"],
- shared_libs: ["libaaudio"],
+ static_libs: ["libsndfile"],
+ shared_libs: [
+ "libaaudio",
+ "libaudioutils",
+ ],
header_libs: ["libaaudio_example_utils"],
}
diff --git a/media/libaaudio/examples/loopback/jni/Android.mk b/media/libaaudio/examples/loopback/jni/Android.mk
index 1fe3def..aebe877 100644
--- a/media/libaaudio/examples/loopback/jni/Android.mk
+++ b/media/libaaudio/examples/loopback/jni/Android.mk
@@ -10,6 +10,7 @@
# NDK recommends using this kind of relative path instead of an absolute path.
LOCAL_SRC_FILES:= ../src/loopback.cpp
LOCAL_CFLAGS := -Wall -Werror
-LOCAL_SHARED_LIBRARIES := libaaudio
+LOCAL_STATIC_LIBRARIES := libsndfile
+LOCAL_SHARED_LIBRARIES := libaaudio libaudioutils
LOCAL_MODULE := aaudio_loopback
include $(BUILD_EXECUTABLE)
diff --git a/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
index 276b45f..b83851a 100644
--- a/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
+++ b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
@@ -30,6 +30,8 @@
#include <stdlib.h>
#include <unistd.h>
+#include <audio_utils/sndfile.h>
+
// Tag for machine readable results as property = value pairs
#define LOOPBACK_RESULT_TAG "RESULT: "
#define LOOPBACK_SAMPLE_RATE 48000
@@ -37,6 +39,7 @@
#define MILLIS_PER_SECOND 1000
#define MAX_ZEROTH_PARTIAL_BINS 40
+constexpr double MAX_ECHO_GAIN = 10.0; // based on experiments, otherwise autocorrelation too noisy
static const float s_Impulse[] = {
0.0f, 0.0f, 0.0f, 0.0f, 0.2f, // silence on each side of the impulse
@@ -156,6 +159,8 @@
const float *needle, int needleSize,
LatencyReport *report) {
const double threshold = 0.1;
+ printf("measureLatencyFromEchos: haystackSize = %d, needleSize = %d\n",
+ haystackSize, needleSize);
// Find first peak
int first = (int) (findFirstMatch(haystack,
@@ -173,7 +178,7 @@
needleSize,
threshold) + 0.5);
- printf("first = %d, again at %d\n", first, again);
+ printf("measureLatencyFromEchos: first = %d, again at %d\n", first, again);
first = again;
// Allocate results array
@@ -270,37 +275,60 @@
return mData;
}
+ void setSampleRate(int32_t sampleRate) {
+ mSampleRate = sampleRate;
+ }
+
+ int32_t getSampleRate() {
+ return mSampleRate;
+ }
+
int save(const char *fileName, bool writeShorts = true) {
+ SNDFILE *sndFile = nullptr;
int written = 0;
- const int chunkSize = 64;
- FILE *fid = fopen(fileName, "wb");
- if (fid == NULL) {
+ SF_INFO info = {
+ .frames = mFrameCounter,
+ .samplerate = mSampleRate,
+ .channels = 1,
+ .format = SF_FORMAT_WAV | (writeShorts ? SF_FORMAT_PCM_16 : SF_FORMAT_FLOAT)
+ };
+
+ sndFile = sf_open(fileName, SFM_WRITE, &info);
+ if (sndFile == nullptr) {
+ printf("AudioRecording::save(%s) failed to open file\n", fileName);
return -errno;
}
- if (writeShorts) {
- int16_t buffer[chunkSize];
- int32_t framesLeft = mFrameCounter;
- int32_t cursor = 0;
- while (framesLeft) {
- int32_t framesToWrite = framesLeft < chunkSize ? framesLeft : chunkSize;
- for (int i = 0; i < framesToWrite; i++) {
- buffer[i] = (int16_t) (mData[cursor++] * 32767);
- }
- written += fwrite(buffer, sizeof(int16_t), framesToWrite, fid);
- framesLeft -= framesToWrite;
- }
- } else {
- written = (int) fwrite(mData, sizeof(float), mFrameCounter, fid);
- }
- fclose(fid);
+ written = sf_writef_float(sndFile, mData, mFrameCounter);
+
+ sf_close(sndFile);
return written;
}
+ int load(const char *fileName) {
+ SNDFILE *sndFile = nullptr;
+ SF_INFO info;
+
+ sndFile = sf_open(fileName, SFM_READ, &info);
+ if (sndFile == nullptr) {
+ printf("AudioRecording::load(%s) failed to open file\n", fileName);
+ return -errno;
+ }
+
+ assert(info.channels == 1);
+
+ allocate(info.frames);
+ mFrameCounter = sf_readf_float(sndFile, mData, info.frames);
+
+ sf_close(sndFile);
+ return mFrameCounter;
+ }
+
private:
float *mData = nullptr;
int32_t mFrameCounter = 0;
int32_t mMaxFrames = 0;
+ int32_t mSampleRate = 48000; // common default
};
// ====================================================================================
@@ -320,11 +348,25 @@
virtual void printStatus() {};
+ virtual int getResult() {
+ return -1;
+ }
+
virtual bool isDone() {
return false;
}
- void setSampleRate(int32_t sampleRate) {
+ virtual int save(const char *fileName) {
+ (void) fileName;
+ return AAUDIO_ERROR_UNIMPLEMENTED;
+ }
+
+ virtual int load(const char *fileName) {
+ (void) fileName;
+ return AAUDIO_ERROR_UNIMPLEMENTED;
+ }
+
+ virtual void setSampleRate(int32_t sampleRate) {
mSampleRate = sampleRate;
}
@@ -395,7 +437,13 @@
public:
EchoAnalyzer() : LoopbackProcessor() {
- audioRecorder.allocate(2 * LOOPBACK_SAMPLE_RATE);
+ mAudioRecording.allocate(2 * getSampleRate());
+ mAudioRecording.setSampleRate(getSampleRate());
+ }
+
+ void setSampleRate(int32_t sampleRate) override {
+ LoopbackProcessor::setSampleRate(sampleRate);
+ mAudioRecording.setSampleRate(sampleRate);
}
void reset() override {
@@ -406,8 +454,12 @@
mState = STATE_INITIAL_SILENCE;
}
+ virtual int getResult() {
+ return mState == STATE_DONE ? 0 : -1;
+ }
+
virtual bool isDone() {
- return mState == STATE_DONE;
+ return mState == STATE_DONE || mState == STATE_FAILED;
}
void setGain(float gain) {
@@ -423,31 +475,24 @@
printf("EchoAnalyzer ---------------\n");
printf(LOOPBACK_RESULT_TAG "measured.gain = %f\n", mMeasuredLoopGain);
printf(LOOPBACK_RESULT_TAG "echo.gain = %f\n", mEchoGain);
- printf(LOOPBACK_RESULT_TAG "frame.count = %d\n", mFrameCounter);
printf(LOOPBACK_RESULT_TAG "test.state = %d\n", mState);
if (mMeasuredLoopGain >= 0.9999) {
printf(" ERROR - clipping, turn down volume slightly\n");
} else {
const float *needle = s_Impulse;
int needleSize = (int) (sizeof(s_Impulse) / sizeof(float));
- float *haystack = audioRecorder.getData();
- int haystackSize = audioRecorder.size();
- measureLatencyFromEchos(haystack, haystackSize, needle, needleSize, &latencyReport);
- if (latencyReport.confidence < 0.01) {
- printf(" ERROR - confidence too low = %f\n", latencyReport.confidence);
+ float *haystack = mAudioRecording.getData();
+ int haystackSize = mAudioRecording.size();
+ measureLatencyFromEchos(haystack, haystackSize, needle, needleSize, &mLatencyReport);
+ if (mLatencyReport.confidence < 0.01) {
+ printf(" ERROR - confidence too low = %f\n", mLatencyReport.confidence);
} else {
- double latencyMillis = 1000.0 * latencyReport.latencyInFrames / getSampleRate();
- printf(LOOPBACK_RESULT_TAG "latency.frames = %8.2f\n", latencyReport.latencyInFrames);
+ double latencyMillis = 1000.0 * mLatencyReport.latencyInFrames / getSampleRate();
+ printf(LOOPBACK_RESULT_TAG "latency.frames = %8.2f\n", mLatencyReport.latencyInFrames);
printf(LOOPBACK_RESULT_TAG "latency.msec = %8.2f\n", latencyMillis);
- printf(LOOPBACK_RESULT_TAG "latency.confidence = %8.6f\n", latencyReport.confidence);
+ printf(LOOPBACK_RESULT_TAG "latency.confidence = %8.6f\n", mLatencyReport.confidence);
}
}
-
- {
-#define ECHO_FILENAME "/data/oboe_echo.raw"
- int written = audioRecorder.save(ECHO_FILENAME);
- printf("Echo wrote %d mono samples to %s on Android device\n", written, ECHO_FILENAME);
- }
}
void printStatus() override {
@@ -491,13 +536,18 @@
// If we get several in a row then go to next state.
if (peak > mPulseThreshold) {
if (mDownCounter-- <= 0) {
- nextState = STATE_WAITING_FOR_SILENCE;
//printf("%5d: switch to STATE_WAITING_FOR_SILENCE, measured peak = %f\n",
// mLoopCounter, peak);
mDownCounter = 8;
mMeasuredLoopGain = peak; // assumes original pulse amplitude is one
// Calculate gain that will give us a nice decaying echo.
mEchoGain = mDesiredEchoGain / mMeasuredLoopGain;
+ if (mEchoGain > MAX_ECHO_GAIN) {
+ printf("ERROR - loop gain too low. Increase the volume.\n");
+ nextState = STATE_FAILED;
+ } else {
+ nextState = STATE_WAITING_FOR_SILENCE;
+ }
}
} else {
mDownCounter = 8;
@@ -524,14 +574,14 @@
break;
case STATE_SENDING_PULSE:
- audioRecorder.write(inputData, inputChannelCount, numFrames);
+ mAudioRecording.write(inputData, inputChannelCount, numFrames);
sendImpulse(outputData, outputChannelCount);
nextState = STATE_GATHERING_ECHOS;
//printf("%5d: switch to STATE_GATHERING_ECHOS\n", mLoopCounter);
break;
case STATE_GATHERING_ECHOS:
- numWritten = audioRecorder.write(inputData, inputChannelCount, numFrames);
+ numWritten = mAudioRecording.write(inputData, inputChannelCount, numFrames);
peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
if (peak > mMeasuredLoopGain) {
mMeasuredLoopGain = peak; // AGC might be raising gain so adjust it on the fly.
@@ -565,6 +615,14 @@
mLoopCounter++;
}
+ int save(const char *fileName) override {
+ return mAudioRecording.save(fileName);
+ }
+
+ int load(const char *fileName) override {
+ return mAudioRecording.load(fileName);
+ }
+
private:
enum echo_state_t {
@@ -573,22 +631,22 @@
STATE_WAITING_FOR_SILENCE,
STATE_SENDING_PULSE,
STATE_GATHERING_ECHOS,
- STATE_DONE
+ STATE_DONE,
+ STATE_FAILED
};
- int mDownCounter = 500;
- int mLoopCounter = 0;
- float mPulseThreshold = 0.02f;
- float mSilenceThreshold = 0.002f;
- float mMeasuredLoopGain = 0.0f;
- float mDesiredEchoGain = 0.95f;
- float mEchoGain = 1.0f;
- echo_state_t mState = STATE_INITIAL_SILENCE;
- int32_t mFrameCounter = 0;
+ int mDownCounter = 500;
+ int mLoopCounter = 0;
+ float mPulseThreshold = 0.02f;
+ float mSilenceThreshold = 0.002f;
+ float mMeasuredLoopGain = 0.0f;
+ float mDesiredEchoGain = 0.95f;
+ float mEchoGain = 1.0f;
+ echo_state_t mState = STATE_INITIAL_SILENCE;
- AudioRecording audioRecorder;
- LatencyReport latencyReport;
- PeakDetector mPeakDetector;
+ AudioRecording mAudioRecording; // contains only the input after the gain detection burst
+ LatencyReport mLatencyReport;
+ // PeakDetector mPeakDetector;
};
@@ -602,6 +660,10 @@
class SineAnalyzer : public LoopbackProcessor {
public:
+ virtual int getResult() {
+ return mState == STATE_LOCKED ? 0 : -1;
+ }
+
void report() override {
printf("SineAnalyzer ------------------\n");
printf(LOOPBACK_RESULT_TAG "peak.amplitude = %7.5f\n", mPeakAmplitude);
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index ac6024e..d23d907 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -37,10 +37,10 @@
// Tag for machine readable results as property = value pairs
#define RESULT_TAG "RESULT: "
-#define SAMPLE_RATE 48000
#define NUM_SECONDS 5
#define NUM_INPUT_CHANNELS 1
-#define FILENAME "/data/oboe_input.raw"
+#define FILENAME_ALL "/data/loopback_all.wav"
+#define FILENAME_ECHOS "/data/loopback_echos.wav"
#define APP_VERSION "0.1.22"
struct LoopbackData {
@@ -61,7 +61,7 @@
SineAnalyzer sineAnalyzer;
EchoAnalyzer echoAnalyzer;
- AudioRecording audioRecorder;
+ AudioRecording audioRecording;
LoopbackProcessor *loopbackProcessor;
};
@@ -126,7 +126,7 @@
result = AAUDIO_CALLBACK_RESULT_STOP;
} else if (framesRead > 0) {
- myData->audioRecorder.write(myData->inputData,
+ myData->audioRecording.write(myData->inputData,
myData->actualInputChannelCount,
numFrames);
@@ -176,7 +176,8 @@
printf(" p for _POWER_SAVING\n");
printf(" -t{test} select test mode\n");
printf(" m for sine magnitude\n");
- printf(" e for echo latency (default)\n\n");
+ printf(" e for echo latency (default)\n");
+ printf(" f for file latency, analyzes %s\n\n", FILENAME_ECHOS);
printf(" -x use EXCLUSIVE mode for output\n");
printf(" -X use EXCLUSIVE mode for input\n");
printf("Example: aaudio_loopback -n2 -pl -Pl -x\n");
@@ -205,6 +206,7 @@
enum {
TEST_SINE_MAGNITUDE = 0,
TEST_ECHO_LATENCY,
+ TEST_FILE_LATENCY,
};
static int parseTestMode(char c) {
@@ -217,6 +219,9 @@
case 'e':
testMode = TEST_ECHO_LATENCY;
break;
+ case 'f':
+ testMode = TEST_FILE_LATENCY;
+ break;
default:
printf("ERROR in value test mode %c\n", c);
break;
@@ -254,13 +259,13 @@
int main(int argc, const char **argv)
{
- AAudioArgsParser argParser;
- AAudioSimplePlayer player;
- AAudioSimpleRecorder recorder;
- LoopbackData loopbackData;
- AAudioStream *outputStream = nullptr;
+ AAudioArgsParser argParser;
+ AAudioSimplePlayer player;
+ AAudioSimpleRecorder recorder;
+ LoopbackData loopbackData;
+ AAudioStream *outputStream = nullptr;
- aaudio_result_t result = AAUDIO_OK;
+ aaudio_result_t result = AAUDIO_OK;
aaudio_sharing_mode_t requestedInputSharingMode = AAUDIO_SHARING_MODE_SHARED;
int requestedInputChannelCount = NUM_INPUT_CHANNELS;
const aaudio_format_t requestedInputFormat = AAUDIO_FORMAT_PCM_I16;
@@ -268,6 +273,7 @@
aaudio_format_t actualInputFormat;
aaudio_format_t actualOutputFormat;
aaudio_performance_mode_t inputPerformanceLevel = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
+ int32_t actualSampleRate = 0;
int testMode = TEST_ECHO_LATENCY;
double gain = 1.0;
@@ -324,7 +330,6 @@
int32_t requestedDuration = argParser.getDurationSeconds();
int32_t recordingDuration = std::min(60, requestedDuration);
- loopbackData.audioRecorder.allocate(recordingDuration * SAMPLE_RATE);
switch(testMode) {
case TEST_SINE_MAGNITUDE:
@@ -334,6 +339,16 @@
loopbackData.echoAnalyzer.setGain(gain);
loopbackData.loopbackProcessor = &loopbackData.echoAnalyzer;
break;
+ case TEST_FILE_LATENCY: {
+ loopbackData.echoAnalyzer.setGain(gain);
+
+ loopbackData.loopbackProcessor = &loopbackData.echoAnalyzer;
+ int read = loopbackData.loopbackProcessor->load(FILENAME_ECHOS);
+ printf("main() read %d mono samples from %s on Android device\n", read, FILENAME_ECHOS);
+ loopbackData.loopbackProcessor->report();
+ return 0;
+ }
+ break;
default:
exit(1);
break;
@@ -344,7 +359,7 @@
result = player.open(argParser, MyDataCallbackProc, MyErrorCallbackProc, &loopbackData);
if (result != AAUDIO_OK) {
fprintf(stderr, "ERROR - player.open() returned %d\n", result);
- goto finish;
+ exit(1);
}
outputStream = player.getStream();
argParser.compareWithStream(outputStream);
@@ -352,6 +367,10 @@
actualOutputFormat = AAudioStream_getFormat(outputStream);
assert(actualOutputFormat == AAUDIO_FORMAT_PCM_FLOAT);
+ actualSampleRate = AAudioStream_getSampleRate(outputStream);
+ loopbackData.audioRecording.allocate(recordingDuration * actualSampleRate);
+ loopbackData.audioRecording.setSampleRate(actualSampleRate);
+
printf("INPUT stream ----------------------------------------\n");
// Use different parameters for the input.
argParser.setNumberOfBursts(AAUDIO_UNSPECIFIED);
@@ -380,7 +399,7 @@
// Allocate a buffer for the audio data.
loopbackData.inputFramesMaximum = 32 * framesPerBurst;
- loopbackData.inputBuffersToDiscard = 100;
+ loopbackData.inputBuffersToDiscard = 200;
loopbackData.inputData = new int16_t[loopbackData.inputFramesMaximum
* loopbackData.actualInputChannelCount];
@@ -436,25 +455,31 @@
}
}
- printf("input error = %d = %s\n",
- loopbackData.inputError, AAudio_convertResultToText(loopbackData.inputError));
+ if (loopbackData.loopbackProcessor->getResult() < 0) {
+ printf("Test failed!\n");
+ } else {
+ printf("input error = %d = %s\n",
+ loopbackData.inputError, AAudio_convertResultToText(loopbackData.inputError));
- printf("AAudioStream_getXRunCount %d\n", AAudioStream_getXRunCount(outputStream));
- printf("framesRead = %8d\n", (int) AAudioStream_getFramesRead(outputStream));
- printf("framesWritten = %8d\n", (int) AAudioStream_getFramesWritten(outputStream));
- printf("min numFrames = %8d\n", (int) loopbackData.minNumFrames);
- printf("max numFrames = %8d\n", (int) loopbackData.maxNumFrames);
+ printf("AAudioStream_getXRunCount %d\n", AAudioStream_getXRunCount(outputStream));
+ printf("framesRead = %8d\n", (int) AAudioStream_getFramesRead(outputStream));
+ printf("framesWritten = %8d\n", (int) AAudioStream_getFramesWritten(outputStream));
+ printf("min numFrames = %8d\n", (int) loopbackData.minNumFrames);
+ printf("max numFrames = %8d\n", (int) loopbackData.maxNumFrames);
- if (loopbackData.inputError == AAUDIO_OK) {
- if (testMode == TEST_SINE_MAGNITUDE) {
- printAudioGraph(loopbackData.audioRecorder, 200);
+ if (loopbackData.inputError == AAUDIO_OK) {
+ if (testMode == TEST_SINE_MAGNITUDE) {
+ printAudioGraph(loopbackData.audioRecording, 200);
+ }
+ loopbackData.loopbackProcessor->report();
}
- loopbackData.loopbackProcessor->report();
- }
- {
- int written = loopbackData.audioRecorder.save(FILENAME);
- printf("main() wrote %d mono samples to %s on Android device\n", written, FILENAME);
+ int written = loopbackData.loopbackProcessor->save(FILENAME_ECHOS);
+ printf("main() wrote %d mono samples to %s on Android device\n", written,
+ FILENAME_ECHOS);
+ printf("main() loopbackData.audioRecording.getSampleRate() = %d\n", loopbackData.audioRecording.getSampleRate());
+ written = loopbackData.audioRecording.save(FILENAME_ALL);
+ printf("main() wrote %d mono samples to %s on Android device\n", written, FILENAME_ALL);
}
finish:
diff --git a/media/libaaudio/examples/utils/AAudioSimplePlayer.h b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
index 3fafecf..54b77ba 100644
--- a/media/libaaudio/examples/utils/AAudioSimplePlayer.h
+++ b/media/libaaudio/examples/utils/AAudioSimplePlayer.h
@@ -170,7 +170,6 @@
aaudio_result_t close() {
if (mStream != nullptr) {
- printf("call AAudioStream_close(%p)\n", mStream); fflush(stdout);
AAudioStream_close(mStream);
mStream = nullptr;
}
diff --git a/media/libaaudio/examples/utils/AAudioSimpleRecorder.h b/media/libaaudio/examples/utils/AAudioSimpleRecorder.h
index 1344273..869fad0 100644
--- a/media/libaaudio/examples/utils/AAudioSimpleRecorder.h
+++ b/media/libaaudio/examples/utils/AAudioSimpleRecorder.h
@@ -178,7 +178,6 @@
aaudio_result_t close() {
if (mStream != nullptr) {
- printf("call AAudioStream_close(%p)\n", mStream); fflush(stdout);
AAudioStream_close(mStream);
mStream = nullptr;
}
diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp
index c33f9f5..807f24d 100644
--- a/media/libeffects/visualizer/EffectVisualizer.cpp
+++ b/media/libeffects/visualizer/EffectVisualizer.cpp
@@ -594,7 +594,9 @@
deltaSmpl = CAPTURE_BUF_SIZE;
}
- int32_t capturePoint = (int32_t)pContext->mCaptureIdx - deltaSmpl;
+ int32_t capturePoint;
+ //capturePoint = (int32_t)pContext->mCaptureIdx - deltaSmpl;
+ __builtin_sub_overflow((int32_t)pContext->mCaptureIdx, deltaSmpl, &capturePoint);
// a negative capturePoint means we wrap the buffer.
if (capturePoint < 0) {
uint32_t size = -capturePoint;
diff --git a/media/libmedia/nuplayer2/HTTPLiveSource.cpp b/media/libmedia/nuplayer2/HTTPLiveSource.cpp
index 14b67cad..e0e3df9 100644
--- a/media/libmedia/nuplayer2/HTTPLiveSource.cpp
+++ b/media/libmedia/nuplayer2/HTTPLiveSource.cpp
@@ -103,7 +103,8 @@
if (mLiveLooper == NULL) {
mLiveLooper = new ALooper;
mLiveLooper->setName("http live");
- mLiveLooper->start();
+ mLiveLooper->start(false, /* runOnCallingThread */
+ true /* canCallJava */);
mLiveLooper->registerHandler(this);
}
diff --git a/media/libmedia/omx/1.0/WOmxNode.cpp b/media/libmedia/omx/1.0/WOmxNode.cpp
index 0b40e8d..2cd8b76 100644
--- a/media/libmedia/omx/1.0/WOmxNode.cpp
+++ b/media/libmedia/omx/1.0/WOmxNode.cpp
@@ -151,7 +151,8 @@
hidl_handle const& outNativeHandle) {
fnStatus = toStatusT(status);
*buffer = outBuffer;
- *native_handle = NativeHandle::create(
+ *native_handle = outNativeHandle.getNativeHandle() == nullptr ?
+ nullptr : NativeHandle::create(
native_handle_clone(outNativeHandle), true);
}));
return transStatus == NO_ERROR ? fnStatus : transStatus;
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 1e2e684..7eff8eb 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -1014,7 +1014,8 @@
mFetcherLooper = new ALooper();
mFetcherLooper->setName("Fetcher");
- mFetcherLooper->start(false, false);
+ mFetcherLooper->start(false, /* runOnCallingThread */
+ true /* canCallJava */);
}
// create fetcher to fetch the master playlist
diff --git a/media/libstagefright/omx/1.0/WOmxNode.cpp b/media/libstagefright/omx/1.0/WOmxNode.cpp
index 9f82283..1dc7c7b 100644
--- a/media/libstagefright/omx/1.0/WOmxNode.cpp
+++ b/media/libstagefright/omx/1.0/WOmxNode.cpp
@@ -154,7 +154,8 @@
hidl_handle const& outNativeHandle) {
fnStatus = toStatusT(status);
*buffer = outBuffer;
- *native_handle = NativeHandle::create(
+ *native_handle = outNativeHandle.getNativeHandle() == nullptr ?
+ nullptr : NativeHandle::create(
native_handle_clone(outNativeHandle), true);
}));
return transStatus == NO_ERROR ? fnStatus : transStatus;
diff --git a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
index 8d8a2d9..a79d403 100644
--- a/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
+++ b/media/libstagefright/omx/include/media/stagefright/omx/1.0/Conversion.h
@@ -1862,7 +1862,8 @@
inline size_t getFlattenedSize(HGraphicBufferProducer::QueueBufferInput const& t) {
return minFlattenedSize(t) +
getFenceFlattenedSize(t.fence) +
- getFlattenedSize(t.surfaceDamage);
+ getFlattenedSize(t.surfaceDamage) +
+ sizeof(HdrMetadata::validTypes);
}
/**
@@ -1916,7 +1917,12 @@
if (status != NO_ERROR) {
return status;
}
- return flatten(t.surfaceDamage, buffer, size);
+ status = flatten(t.surfaceDamage, buffer, size);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ FlattenableUtils::write(buffer, size, decltype(HdrMetadata::validTypes)(0));
+ return NO_ERROR;
}
/**
@@ -1968,6 +1974,7 @@
if (status != NO_ERROR) {
return status;
}
+ // HdrMetadata ignored
return unflatten(&(t->surfaceDamage), buffer, size);
}
diff --git a/packages/MediaUpdate/Android.mk b/packages/MediaUpdate/Android.mk
index e757098..4a71401 100644
--- a/packages/MediaUpdate/Android.mk
+++ b/packages/MediaUpdate/Android.mk
@@ -25,4 +25,10 @@
# TODO: create a separate key for this package.
LOCAL_CERTIFICATE := platform
+# TODO: Use System SDK once public APIs are approved
+# LOCAL_SDK_VERSION := system_current
+
+LOCAL_SRC_FILES := $(call all-java-files-under, src)
+LOCAL_PROGUARD_FLAG_FILES := proguard.cfg
+
include $(BUILD_PACKAGE)
diff --git a/packages/MediaUpdate/proguard.cfg b/packages/MediaUpdate/proguard.cfg
new file mode 100644
index 0000000..874dbf5
--- /dev/null
+++ b/packages/MediaUpdate/proguard.cfg
@@ -0,0 +1,20 @@
+#
+# Copyright 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Keep entry point for updatable Java classes
+-keep public class com.android.media.update.ApiFactory {
+ public static java.lang.Object initialize(android.content.Context);
+}
diff --git a/packages/MediaUpdate/src/com/android/media/update/ApiFactory.java b/packages/MediaUpdate/src/com/android/media/update/ApiFactory.java
new file mode 100644
index 0000000..1cdd177
--- /dev/null
+++ b/packages/MediaUpdate/src/com/android/media/update/ApiFactory.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.update;
+
+import android.content.Context;
+import android.media.update.MediaController2Provider;
+import android.media.update.StaticProvider;
+import android.media.update.ViewProvider;
+import android.widget.MediaController2;
+
+import com.android.widget.MediaController2Impl;
+
+public class ApiFactory implements StaticProvider {
+ private final Context mContext;
+
+ public ApiFactory(Context context) {
+ mContext = context;
+ }
+
+ public static Object initialize(Context context) throws ReflectiveOperationException {
+ return new ApiFactory(context);
+ }
+
+ @Override
+ public MediaController2Provider createMediaController2(
+ MediaController2 instance, ViewProvider superProvider) {
+ return new MediaController2Impl(instance, superProvider);
+ }
+}
diff --git a/packages/MediaUpdate/src/com/android/widget/MediaController2Impl.java b/packages/MediaUpdate/src/com/android/widget/MediaController2Impl.java
new file mode 100644
index 0000000..d322a20
--- /dev/null
+++ b/packages/MediaUpdate/src/com/android/widget/MediaController2Impl.java
@@ -0,0 +1,192 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.widget;
+
+import android.graphics.Canvas;
+import android.media.session.MediaController;
+import android.media.update.MediaController2Provider;
+import android.media.update.ViewProvider;
+import android.view.KeyEvent;
+import android.view.MotionEvent;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.widget.MediaController2;
+
+public class MediaController2Impl implements MediaController2Provider {
+ private final MediaController2 mInstance;
+ private final ViewProvider mSuperProvider;
+
+ public MediaController2Impl(MediaController2 instance, ViewProvider superProvider) {
+ mInstance = instance;
+ mSuperProvider = superProvider;
+
+ // TODO: Implement
+ }
+
+ @Override
+ public void setController_impl(MediaController controller) {
+ // TODO: Implement
+ }
+
+ @Override
+ public void setAnchorView_impl(View view) {
+ // TODO: Implement
+ }
+
+ @Override
+ public void show_impl() {
+ // TODO: Implement
+ }
+
+ @Override
+ public void show_impl(int timeout) {
+ // TODO: Implement
+ }
+
+ @Override
+ public boolean isShowing_impl() {
+ // TODO: Implement
+ return false;
+ }
+
+ @Override
+ public void hide_impl() {
+ // TODO: Implement
+ }
+
+ @Override
+ public void setPrevNextListeners_impl(OnClickListener next, OnClickListener prev) {
+ // TODO: Implement
+ }
+
+ @Override
+ public void showCCButton_impl() {
+ // TODO: Implement
+ }
+
+ @Override
+ public boolean isPlaying_impl() {
+ // TODO: Implement
+ return false;
+ }
+
+ @Override
+ public int getCurrentPosition_impl() {
+ // TODO: Implement
+ return 0;
+ }
+
+ @Override
+ public int getBufferPercentage_impl() {
+ // TODO: Implement
+ return 0;
+ }
+
+ @Override
+ public boolean canPause_impl() {
+ // TODO: Implement
+ return false;
+ }
+
+ @Override
+ public boolean canSeekBackward_impl() {
+ // TODO: Implement
+ return false;
+ }
+
+ @Override
+ public boolean canSeekForward_impl() {
+ // TODO: Implement
+ return false;
+ }
+
+ @Override
+ public void showSubtitle_impl() {
+ // TODO: Implement
+ }
+
+ @Override
+ public void hideSubtitle_impl() {
+ // TODO: Implement
+ }
+
+ @Override
+ public void onAttachedToWindow_impl() {
+ mSuperProvider.onAttachedToWindow_impl();
+ // TODO: Implement
+ }
+
+ @Override
+ public void onDetachedFromWindow_impl() {
+ mSuperProvider.onDetachedFromWindow_impl();
+ // TODO: Implement
+ }
+
+ @Override
+ public void onLayout_impl(boolean changed, int left, int top, int right, int bottom) {
+ mSuperProvider.onLayout_impl(changed, left, top, right, bottom);
+ // TODO: Implement
+ }
+
+ @Override
+ public void draw_impl(Canvas canvas) {
+ mSuperProvider.draw_impl(canvas);
+ // TODO: Implement
+ }
+
+ @Override
+ public CharSequence getAccessibilityClassName_impl() {
+ // TODO: Implement
+ return MediaController2.class.getName();
+ }
+
+ @Override
+ public boolean onTouchEvent_impl(MotionEvent ev) {
+ // TODO: Implement
+ return mSuperProvider.onTouchEvent_impl(ev);
+ }
+
+ @Override
+ public boolean onTrackballEvent_impl(MotionEvent ev) {
+ // TODO: Implement
+ return mSuperProvider.onTrackballEvent_impl(ev);
+ }
+
+ @Override
+ public boolean onKeyDown_impl(int keyCode, KeyEvent event) {
+ // TODO: Implement
+ return mSuperProvider.onKeyDown_impl(keyCode, event);
+ }
+
+ @Override
+ public void onFinishInflate_impl() {
+ mSuperProvider.onFinishInflate_impl();
+ // TODO: Implement
+ }
+
+ @Override
+ public boolean dispatchKeyEvent_impl(KeyEvent event) {
+ // TODO: Implement
+ return mSuperProvider.dispatchKeyEvent_impl(event);
+ }
+
+ @Override
+ public void setEnabled_impl(boolean enabled) {
+ mSuperProvider.setEnabled_impl(enabled);
+ // TODO: Implement
+ }
+}
diff --git a/services/audioflinger/FastThread.cpp b/services/audioflinger/FastThread.cpp
index 85865b7..dc15487 100644
--- a/services/audioflinger/FastThread.cpp
+++ b/services/audioflinger/FastThread.cpp
@@ -297,7 +297,8 @@
size_t i = mBounds & (mDumpState->mSamplingN - 1);
mBounds = (mBounds & 0xFFFF0000) | ((mBounds + 1) & 0xFFFF);
if (mFull) {
- mBounds += 0x10000;
+ //mBounds += 0x10000;
+ __builtin_add_overflow(mBounds, 0x10000, &mBounds);
} else if (!(mBounds & (mDumpState->mSamplingN - 1))) {
mFull = true;
}
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index ae3bbc1..2ff200d 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -643,6 +643,14 @@
dprintf(fd, " API2 camera characteristics:\n");
info2.dump(fd, /*verbosity*/ 2, /*indentation*/ 4);
}
+
+ dprintf(fd, "== Camera HAL device %s (v%d.%d) dumpState: ==\n", device->mName.c_str(),
+ device->mVersion.get_major(), device->mVersion.get_minor());
+ res = device->dumpState(fd);
+ if (res != OK) {
+ dprintf(fd, " <Error dumping device %s state: %s (%d)>\n",
+ device->mName.c_str(), strerror(-res), res);
+ }
}
return OK;
}
@@ -908,6 +916,17 @@
return OK;
}
+status_t CameraProviderManager::ProviderInfo::DeviceInfo1::dumpState(int fd) const {
+ native_handle_t* handle = native_handle_create(1,0);
+ handle->data[0] = fd;
+ hardware::Return<Status> s = mInterface->dumpState(handle);
+ native_handle_delete(handle);
+ if (!s.isOk()) {
+ return INVALID_OPERATION;
+ }
+ return mapToStatusT(s);
+}
+
CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
const metadata_vendor_id_t tagId, const std::string &id,
uint16_t minorVersion,
@@ -1011,6 +1030,17 @@
return isBackwardCompatible;
}
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::dumpState(int fd) const {
+ native_handle_t* handle = native_handle_create(1,0);
+ handle->data[0] = fd;
+ auto ret = mInterface->dumpState(handle);
+ native_handle_delete(handle);
+ if (!ret.isOk()) {
+ return INVALID_OPERATION;
+ }
+ return OK;
+}
+
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
CameraMetadata *characteristics) const {
if (characteristics == nullptr) return BAD_VALUE;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index e82282f..0f1f07b 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -293,6 +293,7 @@
virtual status_t setTorchMode(bool enabled) = 0;
virtual status_t getCameraInfo(hardware::CameraInfo *info) const = 0;
virtual bool isAPI1Compatible() const = 0;
+ virtual status_t dumpState(int fd) const = 0;
virtual status_t getCameraCharacteristics(CameraMetadata *characteristics) const {
(void) characteristics;
return INVALID_OPERATION;
@@ -326,6 +327,7 @@
virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
//In case of Device1Info assume that we are always API1 compatible
virtual bool isAPI1Compatible() const override { return true; }
+ virtual status_t dumpState(int fd) const override;
DeviceInfo1(const std::string& name, const metadata_vendor_id_t tagId,
const std::string &id, uint16_t minorVersion,
const hardware::camera::common::V1_0::CameraResourceCost& resourceCost,
@@ -343,6 +345,7 @@
virtual status_t setTorchMode(bool enabled) override;
virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
virtual bool isAPI1Compatible() const override;
+ virtual status_t dumpState(int fd) const override;
virtual status_t getCameraCharacteristics(
CameraMetadata *characteristics) const override;