Merge "audio policy: fix volume burst on user switch." into nyc-dev
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 8f1115a..0b758b6 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -18,7 +18,6 @@
#define LOG_TAG "ACameraDevice"
#include <vector>
-#include <utility>
#include <inttypes.h>
#include <android/hardware/ICameraService.h>
#include <camera2/SubmitInfo.h>
@@ -43,6 +42,7 @@
const char* CameraDevice::kCaptureFailureKey = "CaptureFailure";
const char* CameraDevice::kSequenceIdKey = "SequenceId";
const char* CameraDevice::kFrameNumberKey = "FrameNumber";
+const char* CameraDevice::kAnwKey = "Anw";
/**
* CameraDevice Implementation
@@ -465,10 +465,9 @@
}
camera_status_t
-CameraDevice::getIGBPfromSessionOutput(
- const ACaptureSessionOutput& config,
+CameraDevice::getIGBPfromAnw(
+ ANativeWindow* anw,
sp<IGraphicBufferProducer>& out) {
- ANativeWindow* anw = config.mWindow;
if (anw == nullptr) {
ALOGE("Error: output ANativeWindow is null");
return ACAMERA_ERROR_INVALID_PARAMETER;
@@ -514,26 +513,28 @@
return ret;
}
- std::set<OutputConfiguration> outputSet;
+ std::set<std::pair<ANativeWindow*, OutputConfiguration>> outputSet;
for (auto outConfig : outputs->mOutputs) {
+ ANativeWindow* anw = outConfig.mWindow;
sp<IGraphicBufferProducer> iGBP(nullptr);
- ret = getIGBPfromSessionOutput(outConfig, iGBP);
+ ret = getIGBPfromAnw(anw, iGBP);
if (ret != ACAMERA_OK) {
return ret;
}
- outputSet.insert(OutputConfiguration(iGBP, outConfig.mRotation));
+ outputSet.insert(std::make_pair(
+ anw, OutputConfiguration(iGBP, outConfig.mRotation)));
}
- std::set<OutputConfiguration> addSet = outputSet;
+ auto addSet = outputSet;
std::vector<int> deleteList;
// Determine which streams need to be created, which to be deleted
for (auto& kvPair : mConfiguredOutputs) {
int streamId = kvPair.first;
- OutputConfiguration& outConfig = kvPair.second;
- if (outputSet.count(outConfig) == 0) {
+ auto& outputPair = kvPair.second;
+ if (outputSet.count(outputPair) == 0) {
deleteList.push_back(streamId); // Need to delete a no longer needed stream
} else {
- addSet.erase(outConfig); // No need to add already existing stream
+ addSet.erase(outputPair); // No need to add already existing stream
}
}
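
For clarity, the loop above is a plain set difference. A self-contained sketch of the same logic, with std::string standing in for the (ANativeWindow*, OutputConfiguration) pair; the names diffStreams, configured, and desired are illustrative, not part of this patch:

    #include <map>
    #include <set>
    #include <string>
    #include <utility>
    #include <vector>

    // Returns {stream ids to delete, outputs still to add}: anything configured
    // but no longer desired is deleted; anything desired and already configured
    // is dropped from the to-add copy so only genuinely new outputs remain.
    std::pair<std::vector<int>, std::set<std::string>> diffStreams(
            const std::map<int, std::string>& configured,
            const std::set<std::string>& desired) {
        std::set<std::string> toAdd = desired;
        std::vector<int> toDelete;
        for (const auto& kv : configured) {
            if (desired.count(kv.second) == 0) {
                toDelete.push_back(kv.first);  // stale stream
            } else {
                toAdd.erase(kv.second);        // already configured
            }
        }
        return {toDelete, toAdd};
    }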
@@ -585,15 +586,15 @@
}
// add new streams
- for (auto outConfig : addSet) {
+ for (auto outputPair : addSet) {
int streamId;
- remoteRet = mRemote->createStream(outConfig, &streamId);
+ remoteRet = mRemote->createStream(outputPair.second, &streamId);
if (!remoteRet.isOk()) {
ALOGE("Camera device %s failed to create stream: %s", getId(),
remoteRet.toString8().string());
return ACAMERA_ERROR_UNKNOWN;
}
- mConfiguredOutputs.insert(std::make_pair(streamId, outConfig));
+ mConfiguredOutputs.insert(std::make_pair(streamId, outputPair));
}
remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false);
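
For context, this is the path exercised when an application builds a session. A minimal app-side sketch using the public NDK entry points, with error handling elided; previewWindow, device, and stateCallbacks are placeholders, not part of this patch:

    #include <camera/NdkCameraDevice.h>

    // Each ANativeWindow added to the container ends up as one
    // (ANativeWindow*, OutputConfiguration) pair in mConfiguredOutputs.
    ACaptureSessionOutputContainer* container = nullptr;
    ACaptureSessionOutputContainer_create(&container);

    ACaptureSessionOutput* output = nullptr;
    ACaptureSessionOutput_create(previewWindow, &output);
    ACaptureSessionOutputContainer_add(container, output);

    ACameraCaptureSession* session = nullptr;
    ACameraDevice_createCaptureSession(device, container, &stateCallbacks, &session);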
@@ -682,26 +683,51 @@
int sequenceId = resultExtras.requestId;
int64_t frameNumber = resultExtras.frameNumber;
int32_t burstId = resultExtras.burstId;
-
- // No way to report buffer error now
- if (errorCode == hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER) {
- ALOGE("Camera %s Lost output buffer for frame %" PRId64,
- getId(), frameNumber);
+ auto it = mSequenceCallbackMap.find(sequenceId);
+ if (it == mSequenceCallbackMap.end()) {
+ ALOGE("%s: Error: capture sequence index %d not found!",
+ __FUNCTION__, sequenceId);
+ setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
return;
}
- // Fire capture failure callback if there is one registered
- auto it = mSequenceCallbackMap.find(sequenceId);
- if (it != mSequenceCallbackMap.end()) {
- CallbackHolder cbh = (*it).second;
- ACameraCaptureSession_captureCallback_failed onError = cbh.mCallbacks.onCaptureFailed;
- sp<ACameraCaptureSession> session = cbh.mSession;
- if ((size_t) burstId >= cbh.mRequests.size()) {
- ALOGE("%s: Error: request index %d out of bound (size %zu)",
- __FUNCTION__, burstId, cbh.mRequests.size());
+
+ CallbackHolder cbh = (*it).second;
+ sp<ACameraCaptureSession> session = cbh.mSession;
+ if ((size_t) burstId >= cbh.mRequests.size()) {
+ ALOGE("%s: Error: request index %d out of bound (size %zu)",
+ __FUNCTION__, burstId, cbh.mRequests.size());
+ setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
+ return;
+ }
+ sp<CaptureRequest> request = cbh.mRequests[burstId];
+
+ // Handle buffer error
+ if (errorCode == hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER) {
+ int32_t streamId = resultExtras.errorStreamId;
+ ACameraCaptureSession_captureCallback_bufferLost onBufferLost =
+ cbh.mCallbacks.onCaptureBufferLost;
+ auto outputPairIt = mConfiguredOutputs.find(streamId);
+ if (outputPairIt == mConfiguredOutputs.end()) {
+ ALOGE("%s: Error: stream id %d does not exist", __FUNCTION__, streamId);
setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
return;
}
- sp<CaptureRequest> request = cbh.mRequests[burstId];
+ ANativeWindow* anw = outputPairIt->second.first;
+
+ ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
+ getId(), anw, frameNumber);
+
+ sp<AMessage> msg = new AMessage(kWhatCaptureBufferLost, mHandler);
+ msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ msg->setObject(kSessionSpKey, session);
+ msg->setPointer(kCallbackFpKey, (void*) onBufferLost);
+ msg->setObject(kCaptureRequestKey, request);
+ msg->setPointer(kAnwKey, (void*) anw);
+ msg->setInt64(kFrameNumberKey, frameNumber);
+ msg->post();
+ } else { // Handle other capture failures
+ // Fire capture failure callback if there is one registered
+ ACameraCaptureSession_captureCallback_failed onError = cbh.mCallbacks.onCaptureFailed;
sp<CameraCaptureFailure> failure(new CameraCaptureFailure());
failure->frameNumber = frameNumber;
// TODO: refine this when implementing flush
@@ -717,11 +743,12 @@
msg->setObject(kCaptureRequestKey, request);
msg->setObject(kCaptureFailureKey, failure);
msg->post();
- }
- // Update tracker
- mFrameNumberTracker.updateTracker(frameNumber, /*isError*/true);
- checkAndFireSequenceCompleteLocked();
+ // Update tracker
+ mFrameNumberTracker.updateTracker(frameNumber, /*isError*/true);
+ checkAndFireSequenceCompleteLocked();
+ }
+ return;
}
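
The net effect: a lost buffer is no longer just logged; the device posts kWhatCaptureBufferLost carrying the request, the application's ANativeWindow, and the frame number. A sketch of an app-side handler matching the new typedef (the function name and logging are illustrative):

    #include <cinttypes>
    #include <cstdio>

    // Signature matches ACameraCaptureSession_captureCallback_bufferLost.
    // The window pointer is the same ANativeWindow* the app registered at
    // session creation, recovered through the mConfiguredOutputs pair above.
    static void onBufferLostCb(void* context, ACameraCaptureSession* session,
                               ACaptureRequest* request, ANativeWindow* window,
                               int64_t frameNumber) {
        std::fprintf(stderr, "buffer lost on window %p, frame %" PRId64 "\n",
                     (void*) window, frameNumber);
        // Typical reaction: drop per-frame bookkeeping and keep streaming.
    }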
void CameraDevice::CallbackHandler::onMessageReceived(
@@ -735,6 +762,7 @@
case kWhatCaptureFail:
case kWhatCaptureSeqEnd:
case kWhatCaptureSeqAbort:
+ case kWhatCaptureBufferLost:
ALOGV("%s: Received msg %d", __FUNCTION__, msg->what());
break;
default:
@@ -801,6 +829,7 @@
case kWhatCaptureFail:
case kWhatCaptureSeqEnd:
case kWhatCaptureSeqAbort:
+ case kWhatCaptureBufferLost:
{
sp<RefBase> obj;
found = msg->findObject(kSessionSpKey, &obj);
@@ -814,6 +843,7 @@
case kWhatCaptureStart:
case kWhatCaptureResult:
case kWhatCaptureFail:
+ case kWhatCaptureBufferLost:
found = msg->findObject(kCaptureRequestKey, &obj);
if (!found) {
ALOGE("%s: Cannot find capture request!", __FUNCTION__);
@@ -956,6 +986,37 @@
(*onSeqAbort)(context, session.get(), seqId);
break;
}
+ case kWhatCaptureBufferLost:
+ {
+ ACameraCaptureSession_captureCallback_bufferLost onBufferLost;
+ found = msg->findPointer(kCallbackFpKey, (void**) &onBufferLost);
+ if (!found) {
+ ALOGE("%s: Cannot find buffer lost callback!", __FUNCTION__);
+ return;
+ }
+ if (onBufferLost == nullptr) {
+ return;
+ }
+
+ ANativeWindow* anw;
+ found = msg->findPointer(kAnwKey, (void**) &anw);
+ if (!found) {
+ ALOGE("%s: Cannot find ANativeWindow!", __FUNCTION__);
+ return;
+ }
+
+ int64_t frameNumber;
+ found = msg->findInt64(kFrameNumberKey, &frameNumber);
+ if (!found) {
+ ALOGE("%s: Cannot find frame number!", __FUNCTION__);
+ return;
+ }
+
+ ACaptureRequest* request = allocateACaptureRequest(requestSp);
+ (*onBufferLost)(context, session.get(), request, anw, frameNumber);
+ freeACaptureRequest(request);
+ break;
+ }
}
break;
}
@@ -1239,8 +1300,8 @@
}
CameraMetadata metadataCopy = metadata;
- // Copied from java implmentation. Why do we need this?
metadataCopy.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, dev->mShadingMapSize, /*data_count*/2);
+ metadataCopy.update(ANDROID_SYNC_FRAME_NUMBER, &frameNumber, /*data_count*/1);
auto it = dev->mSequenceCallbackMap.find(sequenceId);
if (it != dev->mSequenceCallbackMap.end()) {
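
With ANDROID_SYNC_FRAME_NUMBER now stamped into every delivered result, an application can read it back through the matching NDK tag. A sketch, assuming it runs inside an onCaptureCompleted callback where result is the delivered ACameraMetadata*:

    #include <camera/NdkCameraMetadata.h>

    ACameraMetadata_const_entry entry;
    camera_status_t status = ACameraMetadata_getConstEntry(
            result, ACAMERA_SYNC_FRAME_NUMBER, &entry);
    if (status == ACAMERA_OK && entry.count == 1) {
        int64_t frameNumber = entry.data.i64[0];  // frame this result is synced to
        // ... correlate with onCaptureStarted / onCaptureBufferLost ...
    }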
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index fd51a81..3ccf95a 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -20,6 +20,7 @@
#include <map>
#include <set>
#include <atomic>
+#include <utility>
#include <utils/StrongPointer.h>
#include <utils/Mutex.h>
#include <utils/String8.h>
@@ -134,8 +135,8 @@
camera_status_t configureStreamsLocked(const ACaptureSessionOutputContainer* outputs);
- static camera_status_t getIGBPfromSessionOutput(
- const ACaptureSessionOutput& config, sp<IGraphicBufferProducer>& out);
+ static camera_status_t getIGBPfromAnw(
+ ANativeWindow* anw, sp<IGraphicBufferProducer>& out);
static camera_status_t getSurfaceFromANativeWindow(
ANativeWindow* anw, sp<Surface>& out);
@@ -147,8 +148,8 @@
const sp<ServiceCallback> mServiceCallback;
ACameraDevice* mWrapper;
- // stream id -> OutputConfiguration map
- std::map<int, OutputConfiguration> mConfiguredOutputs;
+ // stream id -> pair of (ANW* from application, OutputConfiguration used for camera service)
+ std::map<int, std::pair<ANativeWindow*, OutputConfiguration>> mConfiguredOutputs;
// TODO: maybe a bool will suffice for synchronous implementation?
std::atomic_bool mClosing;
@@ -171,16 +172,17 @@
// definition of handler and message
enum {
// Device state callbacks
- kWhatOnDisconnected, // onDisconnected
- kWhatOnError, // onError
+ kWhatOnDisconnected, // onDisconnected
+ kWhatOnError, // onError
// Session state callbacks
- kWhatSessionStateCb, // onReady, onActive
+ kWhatSessionStateCb, // onReady, onActive
// Capture callbacks
- kWhatCaptureStart, // onCaptureStarted
- kWhatCaptureResult, // onCaptureProgressed, onCaptureCompleted
- kWhatCaptureFail, // onCaptureFailed
- kWhatCaptureSeqEnd, // onCaptureSequenceCompleted
- kWhatCaptureSeqAbort // onCaptureSequenceAborted
+ kWhatCaptureStart, // onCaptureStarted
+ kWhatCaptureResult, // onCaptureProgressed, onCaptureCompleted
+ kWhatCaptureFail, // onCaptureFailed
+ kWhatCaptureSeqEnd, // onCaptureSequenceCompleted
+ kWhatCaptureSeqAbort, // onCaptureSequenceAborted
+ kWhatCaptureBufferLost // onCaptureBufferLost
};
static const char* kContextKey;
static const char* kDeviceKey;
@@ -193,6 +195,7 @@
static const char* kCaptureFailureKey;
static const char* kSequenceIdKey;
static const char* kFrameNumberKey;
+ static const char* kAnwKey;
class CallbackHandler : public AHandler {
public:
CallbackHandler() {}
@@ -227,7 +230,7 @@
if (cbs != nullptr) {
return *cbs;
}
- return { nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr };
+ return { nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr };
}
sp<ACameraCaptureSession> mSession;
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 8366ade..ab99e38 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -294,7 +294,6 @@
case ACAMERA_LENS_FOCUS_DISTANCE:
case ACAMERA_LENS_OPTICAL_STABILIZATION_MODE:
case ACAMERA_NOISE_REDUCTION_MODE:
- case ACAMERA_REQUEST_ID:
case ACAMERA_SCALER_CROP_REGION:
case ACAMERA_SENSOR_EXPOSURE_TIME:
case ACAMERA_SENSOR_FRAME_DURATION:
@@ -311,9 +310,7 @@
case ACAMERA_TONEMAP_MODE:
case ACAMERA_TONEMAP_GAMMA:
case ACAMERA_TONEMAP_PRESET_CURVE:
- case ACAMERA_LED_TRANSMIT:
case ACAMERA_BLACK_LEVEL_LOCK:
- case ACAMERA_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR:
return true;
default:
return false;
diff --git a/include/camera/ndk/NdkCameraCaptureSession.h b/include/camera/ndk/NdkCameraCaptureSession.h
index 5d5cae2..085b614 100644
--- a/include/camera/ndk/NdkCameraCaptureSession.h
+++ b/include/camera/ndk/NdkCameraCaptureSession.h
@@ -23,6 +23,7 @@
* Do not reference types that are not part of the NDK.
* Do not #include files that aren't part of the NDK.
*/
+#include <android/native_window.h>
#include "NdkCameraError.h"
#include "NdkCameraMetadata.h"
@@ -78,6 +79,10 @@
void* context, ACameraCaptureSession* session,
int sequenceId);
+typedef void (*ACameraCaptureSession_captureCallback_bufferLost)(
+ void* context, ACameraCaptureSession* session,
+ ACaptureRequest* request, ANativeWindow* window, int64_t frameNumber);
+
typedef struct ACameraCaptureSession_captureCallbacks {
void* context;
ACameraCaptureSession_captureCallback_start onCaptureStarted;
@@ -86,6 +91,7 @@
ACameraCaptureSession_captureCallback_failed onCaptureFailed;
ACameraCaptureSession_captureCallback_sequenceEnd onCaptureSequenceCompleted;
ACameraCaptureSession_captureCallback_sequenceAbort onCaptureSequenceAborted;
+ ACameraCaptureSession_captureCallback_bufferLost onCaptureBufferLost;
} ACameraCaptureSession_captureCallbacks;
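
A minimal sketch of opting in to the new field; zeroed callbacks are simply never invoked (appState, onCompletedCb, and onBufferLostCb are app-defined, not part of this header):

    #include <cstring>

    ACameraCaptureSession_captureCallbacks cbs;
    std::memset(&cbs, 0, sizeof(cbs));        // unused callbacks stay nullptr
    cbs.context = &appState;
    cbs.onCaptureCompleted = onCompletedCb;
    cbs.onCaptureBufferLost = onBufferLostCb; // the field added above

    // One-shot capture; pass nullptr if the sequence id is not needed.
    ACameraCaptureSession_capture(session, &cbs, 1, &request, nullptr);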
enum {
diff --git a/include/camera/ndk/NdkCameraMetadataTags.h b/include/camera/ndk/NdkCameraMetadataTags.h
index afdd269..3ec164c 100644
--- a/include/camera/ndk/NdkCameraMetadataTags.h
+++ b/include/camera/ndk/NdkCameraMetadataTags.h
@@ -172,8 +172,6 @@
ACAMERA_CONTROL_START + 32,
ACAMERA_CONTROL_AWB_STATE = // byte (enum)
ACAMERA_CONTROL_START + 34,
- ACAMERA_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS = // int32[5*n]
- ACAMERA_CONTROL_START + 35,
ACAMERA_CONTROL_AE_LOCK_AVAILABLE = // byte (enum)
ACAMERA_CONTROL_START + 36,
ACAMERA_CONTROL_AWB_LOCK_AVAILABLE = // byte (enum)
@@ -276,16 +274,6 @@
ACAMERA_NOISE_REDUCTION_START + 2,
ACAMERA_NOISE_REDUCTION_END,
- ACAMERA_QUIRKS_USE_PARTIAL_RESULT = // Deprecated! DO NOT USE
- ACAMERA_QUIRKS_START + 3,
- ACAMERA_QUIRKS_PARTIAL_RESULT = // Deprecated! DO NOT USE
- ACAMERA_QUIRKS_START + 4,
- ACAMERA_QUIRKS_END,
-
- ACAMERA_REQUEST_FRAME_COUNT = // Deprecated! DO NOT USE
- ACAMERA_REQUEST_START,
- ACAMERA_REQUEST_ID = // int32
- ACAMERA_REQUEST_START + 1,
ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS = // int32[3]
ACAMERA_REQUEST_START + 6,
ACAMERA_REQUEST_MAX_NUM_INPUT_STREAMS = // int32
@@ -308,20 +296,8 @@
ACAMERA_SCALER_CROP_REGION = // int32[4]
ACAMERA_SCALER_START,
- ACAMERA_SCALER_AVAILABLE_FORMATS = // Deprecated! DO NOT USE
- ACAMERA_SCALER_START + 1,
- ACAMERA_SCALER_AVAILABLE_JPEG_MIN_DURATIONS = // Deprecated! DO NOT USE
- ACAMERA_SCALER_START + 2,
- ACAMERA_SCALER_AVAILABLE_JPEG_SIZES = // Deprecated! DO NOT USE
- ACAMERA_SCALER_START + 3,
ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM = // float
ACAMERA_SCALER_START + 4,
- ACAMERA_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS = // Deprecated! DO NOT USE
- ACAMERA_SCALER_START + 5,
- ACAMERA_SCALER_AVAILABLE_PROCESSED_SIZES = // Deprecated! DO NOT USE
- ACAMERA_SCALER_START + 6,
- ACAMERA_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP = // int32
- ACAMERA_SCALER_START + 9,
ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS = // int32[n*4] (enum)
ACAMERA_SCALER_START + 10,
ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS = // int64[4*n]
@@ -430,10 +406,6 @@
ACAMERA_STATISTICS_START + 10,
ACAMERA_STATISTICS_LENS_SHADING_MAP = // float[4*n*m]
ACAMERA_STATISTICS_START + 11,
- ACAMERA_STATISTICS_PREDICTED_COLOR_GAINS = // Deprecated! DO NOT USE
- ACAMERA_STATISTICS_START + 12,
- ACAMERA_STATISTICS_PREDICTED_COLOR_TRANSFORM = // Deprecated! DO NOT USE
- ACAMERA_STATISTICS_START + 13,
ACAMERA_STATISTICS_SCENE_FLICKER = // byte (enum)
ACAMERA_STATISTICS_START + 14,
ACAMERA_STATISTICS_HOT_PIXEL_MAP = // int32[2*n]
@@ -470,12 +442,6 @@
ACAMERA_TONEMAP_START + 7,
ACAMERA_TONEMAP_END,
- ACAMERA_LED_TRANSMIT = // byte (enum)
- ACAMERA_LED_START,
- ACAMERA_LED_AVAILABLE_LEDS = // byte[n] (enum)
- ACAMERA_LED_START + 1,
- ACAMERA_LED_END,
-
ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL = // byte (enum)
ACAMERA_INFO_START,
ACAMERA_INFO_END,
@@ -490,12 +456,6 @@
ACAMERA_SYNC_START + 1,
ACAMERA_SYNC_END,
- ACAMERA_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR = // float
- ACAMERA_REPROCESS_START,
- ACAMERA_REPROCESS_MAX_CAPTURE_STALL = // int32
- ACAMERA_REPROCESS_START + 1,
- ACAMERA_REPROCESS_END,
-
ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS = // int32[n*4] (enum)
ACAMERA_DEPTH_START + 1,
ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS = // int64[4*n]
@@ -514,462 +474,2256 @@
// ACAMERA_COLOR_CORRECTION_MODE
typedef enum acamera_metadata_enum_acamera_color_correction_mode {
+ /*
+ * <p>Use the ACAMERA_COLOR_CORRECTION_TRANSFORM matrix
+ * and ACAMERA_COLOR_CORRECTION_GAINS to do color conversion.</p>
+ * <p>All advanced white balance adjustments (not specified
+ * by our white balance pipeline) must be disabled.</p>
+ * <p>If AWB is enabled with <code>ACAMERA_CONTROL_AWB_MODE != OFF</code>, then
+ * TRANSFORM_MATRIX is ignored. The camera device will override
+ * this value to either FAST or HIGH_QUALITY.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * @see ACAMERA_CONTROL_AWB_MODE
+ */
ACAMERA_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX = 0,
+
+ /*
+ * <p>Color correction processing must not slow down
+ * capture rate relative to sensor raw output.</p>
+ * <p>Advanced white balance adjustments above and beyond
+ * the specified white balance pipeline may be applied.</p>
+ * <p>If AWB is enabled with <code>ACAMERA_CONTROL_AWB_MODE != OFF</code>, then
+ * the camera device uses the last frame's AWB values
+ * (or defaults if AWB has never been run).</p>
+ *
+ * @see ACAMERA_CONTROL_AWB_MODE
+ */
ACAMERA_COLOR_CORRECTION_MODE_FAST = 1,
+
+ /*
+ * <p>Color correction processing operates at improved
+ * quality but the capture rate might be reduced (relative to sensor
+ * raw output rate)</p>
+ * <p>Advanced white balance adjustments above and beyond
+ * the specified white balance pipeline may be applied.</p>
+ * <p>If AWB is enabled with <code>ACAMERA_CONTROL_AWB_MODE != OFF</code>, then
+ * the camera device uses the last frame's AWB values
+ * (or defaults if AWB has never been run).</p>
+ *
+ * @see ACAMERA_CONTROL_AWB_MODE
+ */
ACAMERA_COLOR_CORRECTION_MODE_HIGH_QUALITY = 2,
+
} acamera_metadata_enum_android_color_correction_mode_t;
// ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
typedef enum acamera_metadata_enum_acamera_color_correction_aberration_mode {
+ /*
+ * <p>No aberration correction is applied.</p>
+ */
ACAMERA_COLOR_CORRECTION_ABERRATION_MODE_OFF = 0,
+
+ /*
+ * <p>Aberration correction will not slow down capture rate
+ * relative to sensor raw output.</p>
+ */
ACAMERA_COLOR_CORRECTION_ABERRATION_MODE_FAST = 1,
+
+ /*
+ * <p>Aberration correction operates at improved quality but the capture rate might be
+ * reduced (relative to sensor raw output rate)</p>
+ */
ACAMERA_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY = 2,
+
} acamera_metadata_enum_android_color_correction_aberration_mode_t;
// ACAMERA_CONTROL_AE_ANTIBANDING_MODE
typedef enum acamera_metadata_enum_acamera_control_ae_antibanding_mode {
+ /*
+ * <p>The camera device will not adjust exposure duration to
+ * avoid banding problems.</p>
+ */
ACAMERA_CONTROL_AE_ANTIBANDING_MODE_OFF = 0,
+
+ /*
+ * <p>The camera device will adjust exposure duration to
+ * avoid banding problems with 50Hz illumination sources.</p>
+ */
ACAMERA_CONTROL_AE_ANTIBANDING_MODE_50HZ = 1,
+
+ /*
+ * <p>The camera device will adjust exposure duration to
+ * avoid banding problems with 60Hz illumination
+ * sources.</p>
+ */
ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ = 2,
+
+ /*
+ * <p>The camera device will automatically adapt its
+ * antibanding routine to the current illumination
+ * condition. This is the default mode if AUTO is
+ * available on the given camera device.</p>
+ */
ACAMERA_CONTROL_AE_ANTIBANDING_MODE_AUTO = 3,
+
} acamera_metadata_enum_android_control_ae_antibanding_mode_t;
// ACAMERA_CONTROL_AE_LOCK
typedef enum acamera_metadata_enum_acamera_control_ae_lock {
+ /*
+ * <p>Auto-exposure lock is disabled; the AE algorithm
+ * is free to update its parameters.</p>
+ */
ACAMERA_CONTROL_AE_LOCK_OFF = 0,
+
+ /*
+ * <p>Auto-exposure lock is enabled; the AE algorithm
+ * must not update the exposure and sensitivity parameters
+ * while the lock is active.</p>
+ * <p>ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION setting changes
+ * will still take effect while auto-exposure is locked.</p>
+ * <p>Some rare LEGACY devices may not support
+ * this, in which case the value will always be overridden to OFF.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION
+ */
ACAMERA_CONTROL_AE_LOCK_ON = 1,
+
} acamera_metadata_enum_android_control_ae_lock_t;
// ACAMERA_CONTROL_AE_MODE
typedef enum acamera_metadata_enum_acamera_control_ae_mode {
+ /*
+ * <p>The camera device's autoexposure routine is disabled.</p>
+ * <p>The application-selected ACAMERA_SENSOR_EXPOSURE_TIME,
+ * ACAMERA_SENSOR_SENSITIVITY and
+ * ACAMERA_SENSOR_FRAME_DURATION are used by the camera
+ * device, along with ACAMERA_FLASH_* fields, if there's
+ * a flash unit for this camera device.</p>
+ * <p>Note that auto-white balance (AWB) and auto-focus (AF)
+ * behavior is device dependent when AE is in OFF mode.
+ * To have consistent behavior across different devices,
+ * it is recommended to either set AWB and AF to OFF mode
+ * or lock AWB and AF before setting AE to OFF.
+ * See ACAMERA_CONTROL_AWB_MODE, ACAMERA_CONTROL_AF_MODE,
+ * ACAMERA_CONTROL_AWB_LOCK, and ACAMERA_CONTROL_AF_TRIGGER
+ * for more details.</p>
+ * <p>LEGACY devices do not support the OFF mode and will
+ * override attempts to use this value to ON.</p>
+ *
+ * @see ACAMERA_CONTROL_AF_MODE
+ * @see ACAMERA_CONTROL_AF_TRIGGER
+ * @see ACAMERA_CONTROL_AWB_LOCK
+ * @see ACAMERA_CONTROL_AWB_MODE
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_CONTROL_AE_MODE_OFF = 0,
+
+ /*
+ * <p>The camera device's autoexposure routine is active,
+ * with no flash control.</p>
+ * <p>The application's values for
+ * ACAMERA_SENSOR_EXPOSURE_TIME,
+ * ACAMERA_SENSOR_SENSITIVITY, and
+ * ACAMERA_SENSOR_FRAME_DURATION are ignored. The
+ * application has control over the various
+ * ACAMERA_FLASH_* fields.</p>
+ *
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_CONTROL_AE_MODE_ON = 1,
+
+ /*
+ * <p>Like ON, except that the camera device also controls
+ * the camera's flash unit, firing it in low-light
+ * conditions.</p>
+ * <p>The flash may be fired during a precapture sequence
+ * (triggered by ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER) and
+ * may be fired for captures for which the
+ * ACAMERA_CONTROL_CAPTURE_INTENT field is set to
+ * STILL_CAPTURE</p>
+ *
+ * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see ACAMERA_CONTROL_CAPTURE_INTENT
+ */
ACAMERA_CONTROL_AE_MODE_ON_AUTO_FLASH = 2,
+
+ /*
+ * <p>Like ON, except that the camera device also controls
+ * the camera's flash unit, always firing it for still
+ * captures.</p>
+ * <p>The flash may be fired during a precapture sequence
+ * (triggered by ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER) and
+ * will always be fired for captures for which the
+ * ACAMERA_CONTROL_CAPTURE_INTENT field is set to
+ * STILL_CAPTURE</p>
+ *
+ * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see ACAMERA_CONTROL_CAPTURE_INTENT
+ */
ACAMERA_CONTROL_AE_MODE_ON_ALWAYS_FLASH = 3,
+
+ /*
+ * <p>Like ON_AUTO_FLASH, but with automatic red eye
+ * reduction.</p>
+ * <p>If deemed necessary by the camera device, a red eye
+ * reduction flash will fire during the precapture
+ * sequence.</p>
+ */
ACAMERA_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE = 4,
+
} acamera_metadata_enum_android_control_ae_mode_t;
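
Request-side enums like these are written into a capture request as plain bytes; a one-line sketch via the NDK request API (request is an ACaptureRequest* the app already owns):

    #include <camera/NdkCaptureRequest.h>

    uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON_AUTO_FLASH;
    ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);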
// ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
typedef enum acamera_metadata_enum_acamera_control_ae_precapture_trigger {
+ /*
+ * <p>The trigger is idle.</p>
+ */
ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE = 0,
+
+ /*
+ * <p>The precapture metering sequence will be started
+ * by the camera device.</p>
+ * <p>The exact effect of the precapture trigger depends on
+ * the current AE mode and state.</p>
+ */
ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START = 1,
+
+ /*
+ * <p>The camera device will cancel any currently active or completed
+ * precapture metering sequence, the auto-exposure routine will return to its
+ * initial state.</p>
+ */
ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL = 2,
+
} acamera_metadata_enum_android_control_ae_precapture_trigger_t;
// ACAMERA_CONTROL_AF_MODE
typedef enum acamera_metadata_enum_acamera_control_af_mode {
+ /*
+ * <p>The auto-focus routine does not control the lens;
+ * ACAMERA_LENS_FOCUS_DISTANCE is controlled by the
+ * application.</p>
+ *
+ * @see ACAMERA_LENS_FOCUS_DISTANCE
+ */
ACAMERA_CONTROL_AF_MODE_OFF = 0,
+
+ /*
+ * <p>Basic automatic focus mode.</p>
+ * <p>In this mode, the lens does not move unless
+ * the autofocus trigger action is called. When that trigger
+ * is activated, AF will transition to ACTIVE_SCAN, then to
+ * the outcome of the scan (FOCUSED or NOT_FOCUSED).</p>
+ * <p>Always supported if the lens is not fixed focus.</p>
+ * <p>Use ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE to determine if the lens
+ * is fixed-focus.</p>
+ * <p>Triggering AF_CANCEL resets the lens position to default,
+ * and sets the AF state to INACTIVE.</p>
+ *
+ * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ */
ACAMERA_CONTROL_AF_MODE_AUTO = 1,
+
+ /*
+ * <p>Close-up focusing mode.</p>
+ * <p>In this mode, the lens does not move unless the
+ * autofocus trigger action is called. When that trigger is
+ * activated, AF will transition to ACTIVE_SCAN, then to
+ * the outcome of the scan (FOCUSED or NOT_FOCUSED). This
+ * mode is optimized for focusing on objects very close to
+ * the camera.</p>
+ * <p>When that trigger is activated, AF will transition to
+ * ACTIVE_SCAN, then to the outcome of the scan (FOCUSED or
+ * NOT_FOCUSED). Triggering cancel AF resets the lens
+ * position to default, and sets the AF state to
+ * INACTIVE.</p>
+ */
ACAMERA_CONTROL_AF_MODE_MACRO = 2,
+
+ /*
+ * <p>In this mode, the AF algorithm modifies the lens
+ * position continually to attempt to provide a
+ * constantly-in-focus image stream.</p>
+ * <p>The focusing behavior should be suitable for good quality
+ * video recording; typically this means slower focus
+ * movement and no overshoots. When the AF trigger is not
+ * involved, the AF algorithm should start in INACTIVE state,
+ * and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED
+ * states as appropriate. When the AF trigger is activated,
+ * the algorithm should immediately transition into
+ * AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
+ * lens position until a cancel AF trigger is received.</p>
+ * <p>Once cancel is received, the algorithm should transition
+ * back to INACTIVE and resume passive scan. Note that this
+ * behavior is not identical to CONTINUOUS_PICTURE, since an
+ * ongoing PASSIVE_SCAN must immediately be
+ * canceled.</p>
+ */
ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO = 3,
+
+ /*
+ * <p>In this mode, the AF algorithm modifies the lens
+ * position continually to attempt to provide a
+ * constantly-in-focus image stream.</p>
+ * <p>The focusing behavior should be suitable for still image
+ * capture; typically this means focusing as fast as
+ * possible. When the AF trigger is not involved, the AF
+ * algorithm should start in INACTIVE state, and then
+ * transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as
+ * appropriate as it attempts to maintain focus. When the AF
+ * trigger is activated, the algorithm should finish its
+ * PASSIVE_SCAN if active, and then transition into
+ * AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
+ * lens position until a cancel AF trigger is received.</p>
+ * <p>When the AF cancel trigger is activated, the algorithm
+ * should transition back to INACTIVE and then act as if it
+ * has just been started.</p>
+ */
ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE = 4,
+
+ /*
+ * <p>Extended depth of field (digital focus) mode.</p>
+ * <p>The camera device will produce images with an extended
+ * depth of field automatically; no special focusing
+ * operations need to be done before taking a picture.</p>
+ * <p>AF triggers are ignored, and the AF state will always be
+ * INACTIVE.</p>
+ */
ACAMERA_CONTROL_AF_MODE_EDOF = 5,
+
} acamera_metadata_enum_android_control_af_mode_t;
// ACAMERA_CONTROL_AF_TRIGGER
typedef enum acamera_metadata_enum_acamera_control_af_trigger {
+ /*
+ * <p>The trigger is idle.</p>
+ */
ACAMERA_CONTROL_AF_TRIGGER_IDLE = 0,
+
+ /*
+ * <p>Autofocus will trigger now.</p>
+ */
ACAMERA_CONTROL_AF_TRIGGER_START = 1,
+
+ /*
+ * <p>Autofocus will return to its initial
+ * state, and cancel any currently active trigger.</p>
+ */
ACAMERA_CONTROL_AF_TRIGGER_CANCEL = 2,
+
} acamera_metadata_enum_android_control_af_trigger_t;
// ACAMERA_CONTROL_AWB_LOCK
typedef enum acamera_metadata_enum_acamera_control_awb_lock {
+ /*
+ * <p>Auto-white balance lock is disabled; the AWB
+ * algorithm is free to update its parameters if in AUTO
+ * mode.</p>
+ */
ACAMERA_CONTROL_AWB_LOCK_OFF = 0,
+
+ /*
+ * <p>Auto-white balance lock is enabled; the AWB
+ * algorithm will not update its parameters while the lock
+ * is active.</p>
+ */
ACAMERA_CONTROL_AWB_LOCK_ON = 1,
+
} acamera_metadata_enum_android_control_awb_lock_t;
// ACAMERA_CONTROL_AWB_MODE
typedef enum acamera_metadata_enum_acamera_control_awb_mode {
+ /*
+ * <p>The camera device's auto-white balance routine is disabled.</p>
+ * <p>The application-selected color transform matrix
+ * (ACAMERA_COLOR_CORRECTION_TRANSFORM) and gains
+ * (ACAMERA_COLOR_CORRECTION_GAINS) are used by the camera
+ * device for manual white balance control.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ */
ACAMERA_CONTROL_AWB_MODE_OFF = 0,
+
+ /*
+ * <p>The camera device's auto-white balance routine is active.</p>
+ * <p>The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ */
ACAMERA_CONTROL_AWB_MODE_AUTO = 1,
+
+ /*
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses incandescent light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant A.</p>
+ * <p>The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ */
ACAMERA_CONTROL_AWB_MODE_INCANDESCENT = 2,
+
+ /*
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses fluorescent light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant F2.</p>
+ * <p>The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ */
ACAMERA_CONTROL_AWB_MODE_FLUORESCENT = 3,
+
+ /*
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses warm fluorescent light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant F4.</p>
+ * <p>The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ */
ACAMERA_CONTROL_AWB_MODE_WARM_FLUORESCENT = 4,
+
+ /*
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses daylight light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant D65.</p>
+ * <p>The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ */
ACAMERA_CONTROL_AWB_MODE_DAYLIGHT = 5,
+
+ /*
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses cloudy daylight light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ */
ACAMERA_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT = 6,
+
+ /*
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses twilight light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ */
ACAMERA_CONTROL_AWB_MODE_TWILIGHT = 7,
+
+ /*
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses shade light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ */
ACAMERA_CONTROL_AWB_MODE_SHADE = 8,
+
} acamera_metadata_enum_android_control_awb_mode_t;
// ACAMERA_CONTROL_CAPTURE_INTENT
typedef enum acamera_metadata_enum_acamera_control_capture_intent {
+ /*
+ * <p>The goal of this request doesn't fall into the other
+ * categories. The camera device will default to preview-like
+ * behavior.</p>
+ */
ACAMERA_CONTROL_CAPTURE_INTENT_CUSTOM = 0,
+
+ /*
+ * <p>This request is for a preview-like use case.</p>
+ * <p>The precapture trigger may be used to start off a metering
+ * w/flash sequence.</p>
+ */
ACAMERA_CONTROL_CAPTURE_INTENT_PREVIEW = 1,
+
+ /*
+ * <p>This request is for a still capture-type
+ * use case.</p>
+ * <p>If the flash unit is under automatic control, it may fire as needed.</p>
+ */
ACAMERA_CONTROL_CAPTURE_INTENT_STILL_CAPTURE = 2,
+
+ /*
+ * <p>This request is for a video recording
+ * use case.</p>
+ */
ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_RECORD = 3,
+
+ /*
+ * <p>This request is for a video snapshot (still
+ * image while recording video) use case.</p>
+ * <p>The camera device should take the highest-quality image
+ * possible (given the other settings) without disrupting the
+ * frame rate of video recording. </p>
+ */
ACAMERA_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT = 4,
+
+ /*
+ * <p>This request is for a ZSL use case; the
+ * application will stream full-resolution images and
+ * reprocess one or several later for a final
+ * capture.</p>
+ */
ACAMERA_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG = 5,
+
+ /*
+ * <p>This request is for a manual capture use case where
+ * the application wants to directly control the capture parameters.</p>
+ * <p>For example, the application may wish to manually control
+ * ACAMERA_SENSOR_EXPOSURE_TIME, ACAMERA_SENSOR_SENSITIVITY, etc.</p>
+ *
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_CONTROL_CAPTURE_INTENT_MANUAL = 6,
+
} acamera_metadata_enum_android_control_capture_intent_t;
// ACAMERA_CONTROL_EFFECT_MODE
typedef enum acamera_metadata_enum_acamera_control_effect_mode {
+ /*
+ * <p>No color effect will be applied.</p>
+ */
ACAMERA_CONTROL_EFFECT_MODE_OFF = 0,
+
+ /*
+ * <p>A "monocolor" effect where the image is mapped into
+ * a single color.</p>
+ * <p>This will typically be grayscale.</p>
+ */
ACAMERA_CONTROL_EFFECT_MODE_MONO = 1,
+
+ /*
+ * <p>A "photo-negative" effect where the image's colors
+ * are inverted.</p>
+ */
ACAMERA_CONTROL_EFFECT_MODE_NEGATIVE = 2,
+
+ /*
+ * <p>A "solarisation" effect (Sabattier effect) where the
+ * image is wholly or partially reversed in
+ * tone.</p>
+ */
ACAMERA_CONTROL_EFFECT_MODE_SOLARIZE = 3,
+
+ /*
+ * <p>A "sepia" effect where the image is mapped into warm
+ * gray, red, and brown tones.</p>
+ */
ACAMERA_CONTROL_EFFECT_MODE_SEPIA = 4,
+
+ /*
+ * <p>A "posterization" effect where the image uses
+ * discrete regions of tone rather than a continuous
+ * gradient of tones.</p>
+ */
ACAMERA_CONTROL_EFFECT_MODE_POSTERIZE = 5,
+
+ /*
+ * <p>A "whiteboard" effect where the image is typically displayed
+ * as regions of white, with black or grey details.</p>
+ */
ACAMERA_CONTROL_EFFECT_MODE_WHITEBOARD = 6,
+
+ /*
+ * <p>A "blackboard" effect where the image is typically displayed
+ * as regions of black, with white or grey details.</p>
+ */
ACAMERA_CONTROL_EFFECT_MODE_BLACKBOARD = 7,
+
+ /*
+ * <p>An "aqua" effect where a blue hue is added to the image.</p>
+ */
ACAMERA_CONTROL_EFFECT_MODE_AQUA = 8,
+
} acamera_metadata_enum_android_control_effect_mode_t;
// ACAMERA_CONTROL_MODE
typedef enum acamera_metadata_enum_acamera_control_mode {
+ /*
+ * <p>Full application control of pipeline.</p>
+ * <p>All control by the device's metering and focusing (3A)
+ * routines is disabled, and no other settings in
+ * ACAMERA_CONTROL_* have any effect, except that
+ * ACAMERA_CONTROL_CAPTURE_INTENT may be used by the camera
+ * device to select post-processing values for processing
+ * blocks that do not allow for manual control, or are not
+ * exposed by the camera API.</p>
+ * <p>However, the camera device's 3A routines may continue to
+ * collect statistics and update their internal state so that
+ * when control is switched to AUTO mode, good control values
+ * can be immediately applied.</p>
+ *
+ * @see ACAMERA_CONTROL_CAPTURE_INTENT
+ */
ACAMERA_CONTROL_MODE_OFF = 0,
+
+ /*
+ * <p>Use settings for each individual 3A routine.</p>
+ * <p>Manual control of capture parameters is disabled. All
+ * controls in ACAMERA_CONTROL_* besides sceneMode take
+ * effect.</p>
+ */
ACAMERA_CONTROL_MODE_AUTO = 1,
+
+ /*
+ * <p>Use a specific scene mode.</p>
+ * <p>Enabling this disables control.aeMode, control.awbMode and
+ * control.afMode controls; the camera device will ignore
+ * those settings while USE_SCENE_MODE is active (except for
+ * FACE_PRIORITY scene mode). Other control entries are still active.
+ * This setting can only be used if scene mode is supported (i.e.
+ * ACAMERA_CONTROL_AVAILABLE_SCENE_MODES
+ * contains some modes other than DISABLED).</p>
+ *
+ * @see ACAMERA_CONTROL_AVAILABLE_SCENE_MODES
+ */
ACAMERA_CONTROL_MODE_USE_SCENE_MODE = 2,
+
+ /*
+ * <p>Same as OFF mode, except that this capture will not be
+ * used by camera device background auto-exposure, auto-white balance and
+ * auto-focus algorithms (3A) to update their statistics.</p>
+ * <p>Specifically, the 3A routines are locked to the last
+ * values set from a request with AUTO, OFF, or
+ * USE_SCENE_MODE, and any statistics or state updates
+ * collected from manual captures with OFF_KEEP_STATE will be
+ * discarded by the camera device.</p>
+ */
ACAMERA_CONTROL_MODE_OFF_KEEP_STATE = 3,
+
} acamera_metadata_enum_android_control_mode_t;
// ACAMERA_CONTROL_SCENE_MODE
typedef enum acamera_metadata_enum_acamera_control_scene_mode {
+ /*
+ * <p>Indicates that no scene modes are set for a given capture request.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_DISABLED = 0,
+
+ /*
+ * <p>If face detection support exists, use face
+ * detection data for auto-focus, auto-white balance, and
+ * auto-exposure routines.</p>
+ * <p>If face detection statistics are disabled
+ * (i.e. ACAMERA_STATISTICS_FACE_DETECT_MODE is set to OFF),
+ * this should still operate correctly (but will not return
+ * face detection statistics to the framework).</p>
+ * <p>Unlike the other scene modes, ACAMERA_CONTROL_AE_MODE,
+ * ACAMERA_CONTROL_AWB_MODE, and ACAMERA_CONTROL_AF_MODE
+ * remain active when FACE_PRIORITY is set.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AF_MODE
+ * @see ACAMERA_CONTROL_AWB_MODE
+ * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
+ */
ACAMERA_CONTROL_SCENE_MODE_FACE_PRIORITY = 1,
+
+ /*
+ * <p>Optimized for photos of quickly moving objects.</p>
+ * <p>Similar to SPORTS.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_ACTION = 2,
+
+ /*
+ * <p>Optimized for still photos of people.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_PORTRAIT = 3,
+
+ /*
+ * <p>Optimized for photos of distant macroscopic objects.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_LANDSCAPE = 4,
+
+ /*
+ * <p>Optimized for low-light settings.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_NIGHT = 5,
+
+ /*
+ * <p>Optimized for still photos of people in low-light
+ * settings.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_NIGHT_PORTRAIT = 6,
+
+ /*
+ * <p>Optimized for dim, indoor settings where flash must
+ * remain off.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_THEATRE = 7,
+
+ /*
+ * <p>Optimized for bright, outdoor beach settings.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_BEACH = 8,
+
+ /*
+ * <p>Optimized for bright, outdoor settings containing snow.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_SNOW = 9,
+
+ /*
+ * <p>Optimized for scenes of the setting sun.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_SUNSET = 10,
+
+ /*
+ * <p>Optimized to avoid blurry photos due to small amounts of
+ * device motion (for example: due to hand shake).</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_STEADYPHOTO = 11,
+
+ /*
+ * <p>Optimized for nighttime photos of fireworks.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_FIREWORKS = 12,
+
+ /*
+ * <p>Optimized for photos of quickly moving people.</p>
+ * <p>Similar to ACTION.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_SPORTS = 13,
+
+ /*
+ * <p>Optimized for dim, indoor settings with multiple moving
+ * people.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_PARTY = 14,
+
+ /*
+ * <p>Optimized for dim settings where the main light source
+ * is a flame.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_CANDLELIGHT = 15,
+
+ /*
+ * <p>Optimized for accurately capturing a photo of a barcode
+ * for use by camera applications that wish to read the
+ * barcode value.</p>
+ */
ACAMERA_CONTROL_SCENE_MODE_BARCODE = 16,
+
+ /*
+ * <p>This is deprecated, please use {@link
+ * android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
+ * and {@link
+ * android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
+ * for high speed video recording.</p>
+ * <p>Optimized for high speed video recording (frame rate >=60fps) use case.</p>
+ * <p>The supported high speed video sizes and fps ranges are specified in
+ * android.control.availableHighSpeedVideoConfigurations. To get the desired
+ * output frame rates, the application is only allowed to select video size
+ * and fps range combinations listed in this static metadata. The fps range
+ * can be controlled via ACAMERA_CONTROL_AE_TARGET_FPS_RANGE.</p>
+ * <p>In this mode, the camera device will override aeMode, awbMode, and afMode to
+ * ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
+ * controls will be overridden to be FAST. Therefore, no manual control of capture
+ * and post-processing parameters is possible. All other controls operate the
+ * same as when ACAMERA_CONTROL_MODE == AUTO. This means that all other
+ * ACAMERA_CONTROL_* fields continue to work, such as</p>
+ * <ul>
+ * <li>ACAMERA_CONTROL_AE_TARGET_FPS_RANGE</li>
+ * <li>ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION</li>
+ * <li>ACAMERA_CONTROL_AE_LOCK</li>
+ * <li>ACAMERA_CONTROL_AWB_LOCK</li>
+ * <li>ACAMERA_CONTROL_EFFECT_MODE</li>
+ * <li>ACAMERA_CONTROL_AE_REGIONS</li>
+ * <li>ACAMERA_CONTROL_AF_REGIONS</li>
+ * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
+ * <li>ACAMERA_CONTROL_AF_TRIGGER</li>
+ * <li>ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER</li>
+ * </ul>
+ * <p>Outside of ACAMERA_CONTROL_*, the following controls will work:</p>
+ * <ul>
+ * <li>ACAMERA_FLASH_MODE (automatic flash for still capture will not work since aeMode is ON)</li>
+ * <li>ACAMERA_LENS_OPTICAL_STABILIZATION_MODE (if it is supported)</li>
+ * <li>ACAMERA_SCALER_CROP_REGION</li>
+ * <li>ACAMERA_STATISTICS_FACE_DETECT_MODE</li>
+ * </ul>
+ * <p>For the high speed recording use case, the actual maximum supported frame rate may
+ * be lower than what the camera can output, depending on the destination Surfaces for
+ * the image data. For example, if the destination surface is from a video encoder,
+ * the application needs to check if the video encoder is capable of supporting the
+ * high frame rate for a given video size, or it will end up with a lower recording
+ * frame rate. If the destination surface is from a preview window, the preview frame
+ * rate will be bounded by the screen refresh rate.</p>
+ * <p>The camera device will only support up to 2 output high speed streams
+ * (processed non-stalling format defined in ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS)
+ * in this mode. This control will be effective only if all of the below conditions are true:</p>
+ * <ul>
+ * <li>The application created no more than maxNumHighSpeedStreams processed non-stalling
+ * format output streams, where maxNumHighSpeedStreams is calculated as
+ * min(2, ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS[Processed (but not-stalling)]).</li>
+ * <li>The stream sizes are selected from the sizes reported by
+ * android.control.availableHighSpeedVideoConfigurations.</li>
+ * <li>No processed non-stalling or raw streams are configured.</li>
+ * </ul>
+ * <p>When the above conditions are NOT satisfied, the controls of this mode and
+ * ACAMERA_CONTROL_AE_TARGET_FPS_RANGE will be ignored by the camera device;
+ * the camera device will fall back to ACAMERA_CONTROL_MODE <code>==</code> AUTO,
+ * and the returned capture result metadata will give the fps range chosen
+ * by the camera device.</p>
+ * <p>Switching into or out of this mode may trigger some camera ISP/sensor
+ * reconfigurations, which may introduce extra latency. It is recommended that
+ * the application avoid unnecessary scene mode switches as much as possible.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION
+ * @see ACAMERA_CONTROL_AE_LOCK
+ * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see ACAMERA_CONTROL_AE_REGIONS
+ * @see ACAMERA_CONTROL_AE_TARGET_FPS_RANGE
+ * @see ACAMERA_CONTROL_AF_REGIONS
+ * @see ACAMERA_CONTROL_AF_TRIGGER
+ * @see ACAMERA_CONTROL_AWB_LOCK
+ * @see ACAMERA_CONTROL_AWB_REGIONS
+ * @see ACAMERA_CONTROL_EFFECT_MODE
+ * @see ACAMERA_CONTROL_MODE
+ * @see ACAMERA_FLASH_MODE
+ * @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
+ * @see ACAMERA_REQUEST_MAX_NUM_OUTPUT_STREAMS
+ * @see ACAMERA_SCALER_CROP_REGION
+ * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
+ *
+ * <b>Deprecated</b>: please refer to this API documentation to find the alternatives
+ */
ACAMERA_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO = 17,
+
+ /*
+ * <p>Turn on a device-specific high dynamic range (HDR) mode.</p>
+ * <p>In this scene mode, the camera device captures images
+ * that keep a larger range of scene illumination levels
+ * visible in the final image. For example, when taking a
+ * picture of an object in front of a bright window, both
+ * the object and the scene through the window may be
+ * visible when using HDR mode, while in normal AUTO mode,
+ * one or the other may be poorly exposed. As a tradeoff,
+ * HDR mode generally takes much longer to capture a single
+ * image, has no user control, and may have other artifacts
+ * depending on the HDR method used.</p>
+ * <p>Therefore, HDR captures operate at a much slower rate
+ * than regular captures.</p>
+ * <p>In this mode, on LIMITED or FULL devices, when a request
+ * is made with a ACAMERA_CONTROL_CAPTURE_INTENT of
+ * STILL_CAPTURE, the camera device will capture an image
+ * using a high dynamic range capture technique. On LEGACY
+ * devices, captures that target a JPEG-format output will
+ * be captured with HDR, and the capture intent is not
+ * relevant.</p>
+ * <p>The HDR capture may involve the device capturing a burst
+ * of images internally and combining them into one, or it
+ * may involve the device using specialized high dynamic
+ * range capture hardware. In all cases, a single image is
+ * produced in response to a capture request submitted
+ * while in HDR mode.</p>
+ * <p>Since substantial post-processing is generally needed to
+ * produce an HDR image, only YUV and JPEG outputs are
+ * supported for LIMITED/FULL device HDR captures, and only
+ * JPEG outputs are supported for LEGACY HDR
+ * captures. Using a RAW output for HDR capture is not
+ * supported.</p>
+ *
+ * @see ACAMERA_CONTROL_CAPTURE_INTENT
+ */
ACAMERA_CONTROL_SCENE_MODE_HDR = 18,
+
} acamera_metadata_enum_android_control_scene_mode_t;
// ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
typedef enum acamera_metadata_enum_acamera_control_video_stabilization_mode {
+ /*
+ * <p>Video stabilization is disabled.</p>
+ */
ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_OFF = 0,
+
+ /*
+ * <p>Video stabilization is enabled.</p>
+ */
ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_ON = 1,
+
} acamera_metadata_enum_android_control_video_stabilization_mode_t;
// ACAMERA_CONTROL_AE_STATE
typedef enum acamera_metadata_enum_acamera_control_ae_state {
+ /*
+ * <p>AE is off or recently reset.</p>
+ * <p>When a camera device is opened, it starts in
+ * this state. This is a transient state, the camera device may skip reporting
+ * this state in capture result.</p>
+ */
ACAMERA_CONTROL_AE_STATE_INACTIVE = 0,
+
+ /*
+ * <p>AE doesn't yet have a good set of control values
+ * for the current scene.</p>
+ * <p>This is a transient state, the camera device may skip
+ * reporting this state in capture result.</p>
+ */
ACAMERA_CONTROL_AE_STATE_SEARCHING = 1,
+
+ /*
+ * <p>AE has a good set of control values for the
+ * current scene.</p>
+ */
ACAMERA_CONTROL_AE_STATE_CONVERGED = 2,
+
+ /*
+ * <p>AE has been locked.</p>
+ */
ACAMERA_CONTROL_AE_STATE_LOCKED = 3,
+
+ /*
+ * <p>AE has a good set of control values, but flash
+ * needs to be fired for good quality still
+ * capture.</p>
+ */
ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED = 4,
+
+ /*
+ * <p>AE has been asked to do a precapture sequence
+ * and is currently executing it.</p>
+ * <p>Precapture can be triggered through setting
+ * ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER to START. Currently
+ * active and completed (if it causes camera device internal AE lock) precapture
+ * metering sequence can be canceled through setting
+ * ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER to CANCEL.</p>
+ * <p>Once PRECAPTURE completes, AE will transition to CONVERGED
+ * or FLASH_REQUIRED as appropriate. This is a transient
+ * state, the camera device may skip reporting this state in
+ * capture result.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ */
ACAMERA_CONTROL_AE_STATE_PRECAPTURE = 5,
+
} acamera_metadata_enum_android_control_ae_state_t;
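
Result-side state enums like this one travel the other way, in capture results. A sketch of polling the AE state from an onCaptureCompleted result (result is the delivered ACameraMetadata*):

    ACameraMetadata_const_entry entry;
    if (ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &entry)
                == ACAMERA_OK && entry.count == 1) {
        bool needsFlash =
                (entry.data.u8[0] == ACAMERA_CONTROL_AE_STATE_FLASH_REQUIRED);
        // e.g. trigger a precapture sequence before still capture if needed
    }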
// ACAMERA_CONTROL_AF_STATE
typedef enum acamera_metadata_enum_acamera_control_af_state {
+ /*
+ * <p>AF is off or has not yet tried to scan/been asked
+ * to scan.</p>
+ * <p>When a camera device is opened, it starts in this
+ * state. This is a transient state, the camera device may
+ * skip reporting this state in capture
+ * result.</p>
+ */
ACAMERA_CONTROL_AF_STATE_INACTIVE = 0,
+
+ /*
+ * <p>AF is currently performing an AF scan initiated by the
+ * camera device in a continuous autofocus mode.</p>
+ * <p>Only used by CONTINUOUS_* AF modes. This is a transient
+ * state, the camera device may skip reporting this state in
+ * capture result.</p>
+ */
ACAMERA_CONTROL_AF_STATE_PASSIVE_SCAN = 1,
+
+ /*
+ * <p>AF currently believes it is in focus, but may
+ * restart scanning at any time.</p>
+ * <p>Only used by CONTINUOUS_* AF modes. This is a transient
+ * state, the camera device may skip reporting this state in
+ * capture result.</p>
+ */
ACAMERA_CONTROL_AF_STATE_PASSIVE_FOCUSED = 2,
+
+ /*
+ * <p>AF is performing an AF scan because it was
+ * triggered by AF trigger.</p>
+ * <p>Only used by AUTO or MACRO AF modes. This is a transient
+ * state, the camera device may skip reporting this state in
+ * capture result.</p>
+ */
ACAMERA_CONTROL_AF_STATE_ACTIVE_SCAN = 3,
+
+ /*
+ * <p>AF believes it is focused correctly and has locked
+ * focus.</p>
+ * <p>This state is reached only after an explicit START AF trigger has been
+ * sent (ACAMERA_CONTROL_AF_TRIGGER), when good focus has been obtained.</p>
+ * <p>The lens will remain stationary until the AF mode (ACAMERA_CONTROL_AF_MODE) is changed or
+ * a new AF trigger is sent to the camera device (ACAMERA_CONTROL_AF_TRIGGER).</p>
+ *
+ * @see ACAMERA_CONTROL_AF_MODE
+ * @see ACAMERA_CONTROL_AF_TRIGGER
+ */
ACAMERA_CONTROL_AF_STATE_FOCUSED_LOCKED = 4,
+
+ /*
+ * <p>AF has failed to focus successfully and has locked
+ * focus.</p>
+ * <p>This state is reached only after an explicit START AF trigger has been
+ * sent (ACAMERA_CONTROL_AF_TRIGGER), when good focus cannot be obtained.</p>
+ * <p>The lens will remain stationary until the AF mode (ACAMERA_CONTROL_AF_MODE) is changed or
+ * a new AF trigger is sent to the camera device (ACAMERA_CONTROL_AF_TRIGGER).</p>
+ *
+ * @see ACAMERA_CONTROL_AF_MODE
+ * @see ACAMERA_CONTROL_AF_TRIGGER
+ */
ACAMERA_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED = 5,
+
+ /*
+ * <p>AF finished a passive scan without finding focus,
+ * and may restart scanning at any time.</p>
+ * <p>Only used by CONTINUOUS_* AF modes. This is a transient state, the camera
+ * device may skip reporting this state in capture result.</p>
+ * <p>LEGACY camera devices do not support this state. When a passive
+ * scan has finished, it will always go to PASSIVE_FOCUSED.</p>
+ */
ACAMERA_CONTROL_AF_STATE_PASSIVE_UNFOCUSED = 6,
+
} acamera_metadata_enum_android_control_af_state_t;
// ACAMERA_CONTROL_AWB_STATE
typedef enum acamera_metadata_enum_acamera_control_awb_state {
+ /*
+ * <p>AWB is not in auto mode, or has not yet started metering.</p>
+ * <p>When a camera device is opened, it starts in this
+ * state. This is a transient state, the camera device may
+ * skip reporting this state in capture
+ * result.</p>
+ */
ACAMERA_CONTROL_AWB_STATE_INACTIVE = 0,
+
+ /*
+ * <p>AWB doesn't yet have a good set of control
+ * values for the current scene.</p>
+ * <p>This is a transient state, the camera device
+ * may skip reporting this state in capture result.</p>
+ */
ACAMERA_CONTROL_AWB_STATE_SEARCHING = 1,
+
+ /*
+ * <p>AWB has a good set of control values for the
+ * current scene.</p>
+ */
ACAMERA_CONTROL_AWB_STATE_CONVERGED = 2,
+
+ /*
+ * <p>AWB has been locked.</p>
+ */
ACAMERA_CONTROL_AWB_STATE_LOCKED = 3,
+
} acamera_metadata_enum_android_control_awb_state_t;
// ACAMERA_CONTROL_AE_LOCK_AVAILABLE
typedef enum acamera_metadata_enum_acamera_control_ae_lock_available {
ACAMERA_CONTROL_AE_LOCK_AVAILABLE_FALSE = 0,
+
ACAMERA_CONTROL_AE_LOCK_AVAILABLE_TRUE = 1,
+
} acamera_metadata_enum_android_control_ae_lock_available_t;
// ACAMERA_CONTROL_AWB_LOCK_AVAILABLE
typedef enum acamera_metadata_enum_acamera_control_awb_lock_available {
ACAMERA_CONTROL_AWB_LOCK_AVAILABLE_FALSE = 0,
+
ACAMERA_CONTROL_AWB_LOCK_AVAILABLE_TRUE = 1,
+
} acamera_metadata_enum_android_control_awb_lock_available_t;
// ACAMERA_EDGE_MODE
typedef enum acamera_metadata_enum_acamera_edge_mode {
+ /*
+ * <p>No edge enhancement is applied.</p>
+ */
ACAMERA_EDGE_MODE_OFF = 0,
+
+ /*
+ * <p>Apply edge enhancement at a quality level that does not slow down frame rate
+ * relative to sensor output. It may be the same as OFF if edge enhancement will
+ * slow down frame rate relative to sensor.</p>
+ */
ACAMERA_EDGE_MODE_FAST = 1,
+
+ /*
+ * <p>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.</p>
+ */
ACAMERA_EDGE_MODE_HIGH_QUALITY = 2,
+
+ /*
+ * <p>Edge enhancement is applied at different levels for different output streams,
+ * based on resolution. Streams at maximum recording resolution (see {@link
+ * android.hardware.camera2.CameraDevice#createCaptureSession}) or below have
+ * edge enhancement applied, while higher-resolution streams have no edge enhancement
+ * applied. The level of edge enhancement for low-resolution streams is tuned so that
+ * frame rate is not impacted, and the quality is equal to or better than FAST (since it
+ * is only applied to lower-resolution outputs, quality may improve from FAST).</p>
+ * <p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
+ * with YUV or PRIVATE reprocessing, where the application continuously captures
+ * high-resolution intermediate buffers into a circular buffer, from which a final image is
+ * produced via reprocessing when a user takes a picture. For such a use case, the
+ * high-resolution buffers must not have edge enhancement applied to maximize efficiency of
+ * preview and to avoid double-applying enhancement when reprocessed, while low-resolution
+ * buffers (used for recording or preview, generally) need edge enhancement applied for
+ * reasonable preview quality.</p>
+ * <p>This mode is guaranteed to be supported by devices that support either the
+ * YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
+ * (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES lists either of those capabilities) and it will
+ * be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.</p>
+ *
+ * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ */
ACAMERA_EDGE_MODE_ZERO_SHUTTER_LAG = 3,
+
} acamera_metadata_enum_android_edge_mode_t;
// ACAMERA_FLASH_MODE
typedef enum acamera_metadata_enum_acamera_flash_mode {
+ /*
+ * <p>Do not fire the flash for this capture.</p>
+ */
ACAMERA_FLASH_MODE_OFF = 0,
+
+ /*
+ * <p>If the flash is available and charged, fire flash
+ * for this capture.</p>
+ */
ACAMERA_FLASH_MODE_SINGLE = 1,
+
+ /*
+ * <p>Transition flash to continuously on.</p>
+ */
ACAMERA_FLASH_MODE_TORCH = 2,
+
} acamera_metadata_enum_android_flash_mode_t;
// ACAMERA_FLASH_STATE
typedef enum acamera_metadata_enum_acamera_flash_state {
+ /*
+ * <p>No flash on camera.</p>
+ */
ACAMERA_FLASH_STATE_UNAVAILABLE = 0,
+
+ /*
+ * <p>Flash is charging and cannot be fired.</p>
+ */
ACAMERA_FLASH_STATE_CHARGING = 1,
+
+ /*
+ * <p>Flash is ready to fire.</p>
+ */
ACAMERA_FLASH_STATE_READY = 2,
+
+ /*
+ * <p>Flash fired for this capture.</p>
+ */
ACAMERA_FLASH_STATE_FIRED = 3,
+
+ /*
+ * <p>Flash partially illuminated this frame.</p>
+ * <p>This is usually due to the next or previous frame having
+ * the flash fire, and the flash spilling into this capture
+ * due to hardware limitations.</p>
+ */
ACAMERA_FLASH_STATE_PARTIAL = 4,
+
} acamera_metadata_enum_android_flash_state_t;
// ACAMERA_FLASH_INFO_AVAILABLE
typedef enum acamera_metadata_enum_acamera_flash_info_available {
ACAMERA_FLASH_INFO_AVAILABLE_FALSE = 0,
+
ACAMERA_FLASH_INFO_AVAILABLE_TRUE = 1,
+
} acamera_metadata_enum_android_flash_info_available_t;
// ACAMERA_HOT_PIXEL_MODE
typedef enum acamera_metadata_enum_acamera_hot_pixel_mode {
+ /*
+ * <p>No hot pixel correction is applied.</p>
+ * <p>The frame rate must not be reduced relative to sensor raw output
+ * for this option.</p>
+ * <p>The hotpixel map may be returned in ACAMERA_STATISTICS_HOT_PIXEL_MAP.</p>
+ *
+ * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
+ */
ACAMERA_HOT_PIXEL_MODE_OFF = 0,
+
+ /*
+ * <p>Hot pixel correction is applied, without reducing frame
+ * rate relative to sensor raw output.</p>
+ * <p>The hotpixel map may be returned in ACAMERA_STATISTICS_HOT_PIXEL_MAP.</p>
+ *
+ * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
+ */
ACAMERA_HOT_PIXEL_MODE_FAST = 1,
+
+ /*
+ * <p>High-quality hot pixel correction is applied, at a cost
+ * of possibly reduced frame rate relative to sensor raw output.</p>
+ * <p>The hotpixel map may be returned in ACAMERA_STATISTICS_HOT_PIXEL_MAP.</p>
+ *
+ * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
+ */
ACAMERA_HOT_PIXEL_MODE_HIGH_QUALITY = 2,
+
} acamera_metadata_enum_android_hot_pixel_mode_t;
// ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
typedef enum acamera_metadata_enum_acamera_lens_optical_stabilization_mode {
+ /*
+ * <p>Optical stabilization is unavailable.</p>
+ */
ACAMERA_LENS_OPTICAL_STABILIZATION_MODE_OFF = 0,
+
+ /*
+ * <p>Optical stabilization is enabled.</p>
+ */
ACAMERA_LENS_OPTICAL_STABILIZATION_MODE_ON = 1,
+
} acamera_metadata_enum_android_lens_optical_stabilization_mode_t;
// ACAMERA_LENS_FACING
typedef enum acamera_metadata_enum_acamera_lens_facing {
+ /*
+ * <p>The camera device faces the same direction as the device's screen.</p>
+ */
ACAMERA_LENS_FACING_FRONT = 0,
+
+ /*
+ * <p>The camera device faces the opposite direction as the device's screen.</p>
+ */
ACAMERA_LENS_FACING_BACK = 1,
+
+ /*
+ * <p>The camera device is an external camera, and has no fixed facing relative to the
+ * device's screen.</p>
+ */
ACAMERA_LENS_FACING_EXTERNAL = 2,
+
} acamera_metadata_enum_android_lens_facing_t;
// ACAMERA_LENS_STATE
typedef enum acamera_metadata_enum_acamera_lens_state {
+ /*
+ * <p>The lens parameters (ACAMERA_LENS_FOCAL_LENGTH, ACAMERA_LENS_FOCUS_DISTANCE,
+ * ACAMERA_LENS_FILTER_DENSITY and ACAMERA_LENS_APERTURE) are not changing.</p>
+ *
+ * @see ACAMERA_LENS_APERTURE
+ * @see ACAMERA_LENS_FILTER_DENSITY
+ * @see ACAMERA_LENS_FOCAL_LENGTH
+ * @see ACAMERA_LENS_FOCUS_DISTANCE
+ */
ACAMERA_LENS_STATE_STATIONARY = 0,
+
+ /*
+ * <p>One or several of the lens parameters
+ * (ACAMERA_LENS_FOCAL_LENGTH, ACAMERA_LENS_FOCUS_DISTANCE,
+ * ACAMERA_LENS_FILTER_DENSITY or ACAMERA_LENS_APERTURE) is
+ * currently changing.</p>
+ *
+ * @see ACAMERA_LENS_APERTURE
+ * @see ACAMERA_LENS_FILTER_DENSITY
+ * @see ACAMERA_LENS_FOCAL_LENGTH
+ * @see ACAMERA_LENS_FOCUS_DISTANCE
+ */
ACAMERA_LENS_STATE_MOVING = 1,
+
} acamera_metadata_enum_android_lens_state_t;
// ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
typedef enum acamera_metadata_enum_acamera_lens_info_focus_distance_calibration {
+ /*
+ * <p>The lens focus distance is not accurate, and the units used for
+ * ACAMERA_LENS_FOCUS_DISTANCE do not correspond to any physical units.</p>
+ * <p>Setting the lens to the same focus distance on separate occasions may
+ * result in a different real focus distance, depending on factors such
+ * as the orientation of the device, the age of the focusing mechanism,
+ * and the device temperature. The focus distance value will still be
+ * in the range of <code>[0, ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE]</code>, where 0
+ * represents the farthest focus.</p>
+ *
+ * @see ACAMERA_LENS_FOCUS_DISTANCE
+ * @see ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ */
ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED = 0,
+
+ /*
+ * <p>The lens focus distance is measured in diopters.</p>
+ * <p>However, setting the lens to the same focus distance
+ * on separate occasions may result in a different real
+ * focus distance, depending on factors such as the
+ * orientation of the device, the age of the focusing
+ * mechanism, and the device temperature.</p>
+ */
ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE = 1,
+
+ /*
+ * <p>The lens focus distance is measured in diopters, and
+ * is calibrated.</p>
+ * <p>The lens mechanism is calibrated so that setting the
+ * same focus distance is repeatable on multiple
+ * occasions with good accuracy, and the focus distance
+ * corresponds to the real physical distance to the plane
+ * of best focus.</p>
+ */
ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED = 2,
+
} acamera_metadata_enum_android_lens_info_focus_distance_calibration_t;
// ACAMERA_NOISE_REDUCTION_MODE
typedef enum acamera_metadata_enum_acamera_noise_reduction_mode {
+ /*
+ * <p>No noise reduction is applied.</p>
+ */
ACAMERA_NOISE_REDUCTION_MODE_OFF = 0,
+
+ /*
+ * <p>Noise reduction is applied without reducing frame rate relative to sensor
+ * output. It may be the same as OFF if noise reduction will reduce frame rate
+ * relative to sensor.</p>
+ */
ACAMERA_NOISE_REDUCTION_MODE_FAST = 1,
+
+ /*
+ * <p>High-quality noise reduction is applied, at the cost of possibly reduced frame
+ * rate relative to sensor output.</p>
+ */
ACAMERA_NOISE_REDUCTION_MODE_HIGH_QUALITY = 2,
+
+ /*
+ * <p>MINIMAL noise reduction is applied without reducing frame rate relative to
+ * sensor output.</p>
+ */
ACAMERA_NOISE_REDUCTION_MODE_MINIMAL = 3,
+
+ /*
+ * <p>Noise reduction is applied at different levels for different output streams,
+ * based on resolution. Streams at maximum recording resolution (see {@link
+ * android.hardware.camera2.CameraDevice#createCaptureSession}) or below have noise
+ * reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
+ * noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction
+ * for low-resolution streams is tuned so that frame rate is not impacted, and the quality
+ * is equal to or better than FAST (since it is only applied to lower-resolution outputs,
+ * quality may improve from FAST).</p>
+ * <p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
+ * with YUV or PRIVATE reprocessing, where the application continuously captures
+ * high-resolution intermediate buffers into a circular buffer, from which a final image is
+ * produced via reprocessing when a user takes a picture. For such a use case, the
+ * high-resolution buffers must not have noise reduction applied to maximize efficiency of
+ * preview and to avoid over-applying noise filtering when reprocessing, while
+ * low-resolution buffers (used for recording or preview, generally) need noise reduction
+ * applied for reasonable preview quality.</p>
+ * <p>This mode is guaranteed to be supported by devices that support either the
+ * YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
+ * (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES lists either of those capabilities) and it will
+ * be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.</p>
+ *
+ * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ */
ACAMERA_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG = 4,
+
} acamera_metadata_enum_android_noise_reduction_mode_t;
-// ACAMERA_QUIRKS_PARTIAL_RESULT
-typedef enum acamera_metadata_enum_acamera_quirks_partial_result {
- ACAMERA_QUIRKS_PARTIAL_RESULT_FINAL = 0,
- ACAMERA_QUIRKS_PARTIAL_RESULT_PARTIAL = 1,
-} acamera_metadata_enum_android_quirks_partial_result_t;
-
// ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
typedef enum acamera_metadata_enum_acamera_request_available_capabilities {
+ /*
+ * <p>The minimal set of capabilities that every camera
+ * device (regardless of ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL)
+ * supports.</p>
+ * <p>This capability is listed by all normal devices, and
+ * indicates that the camera device has a feature set
+ * that's comparable to the baseline requirements for the
+ * older android.hardware.Camera API.</p>
+ * <p>Devices with the DEPTH_OUTPUT capability might not list this
+ * capability, indicating that they support only depth measurement,
+ * not standard color output.</p>
+ *
+ * @see ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL
+ */
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE = 0,
+
+ /*
+ * <p>The camera device can be manually controlled (3A algorithms such
+ * as auto-exposure, and auto-focus can be bypassed).
+ * The camera device supports basic manual control of the sensor image
+ * acquisition related stages. This means the following controls are
+ * guaranteed to be supported:</p>
+ * <ul>
+ * <li>Manual frame duration control<ul>
+ * <li>ACAMERA_SENSOR_FRAME_DURATION</li>
+ * <li>ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION</li>
+ * </ul>
+ * </li>
+ * <li>Manual exposure control<ul>
+ * <li>ACAMERA_SENSOR_EXPOSURE_TIME</li>
+ * <li>ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE</li>
+ * </ul>
+ * </li>
+ * <li>Manual sensitivity control<ul>
+ * <li>ACAMERA_SENSOR_SENSITIVITY</li>
+ * <li>ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE</li>
+ * </ul>
+ * </li>
+ * <li>Manual lens control (if the lens is adjustable)<ul>
+ * <li>ACAMERA_LENS_*</li>
+ * </ul>
+ * </li>
+ * <li>Manual flash control (if a flash unit is present)<ul>
+ * <li>ACAMERA_FLASH_*</li>
+ * </ul>
+ * </li>
+ * <li>Manual black level locking<ul>
+ * <li>ACAMERA_BLACK_LEVEL_LOCK</li>
+ * </ul>
+ * </li>
+ * <li>Auto exposure lock<ul>
+ * <li>ACAMERA_CONTROL_AE_LOCK</li>
+ * </ul>
+ * </li>
+ * </ul>
+ * <p>If any of the above 3A algorithms are enabled, then the camera
+ * device will accurately report the values applied by 3A in the
+ * result.</p>
+ * <p>A given camera device may also support additional manual sensor controls,
+ * but this capability only covers the above list of controls.</p>
+ * <p>If this is supported, android.scaler.streamConfigurationMap will
+ * additionally return a min frame duration that is greater than
+ * zero for each supported size-format combination.</p>
+ *
+ * @see ACAMERA_BLACK_LEVEL_LOCK
+ * @see ACAMERA_CONTROL_AE_LOCK
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ * @see ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE
+ * @see ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION
+ * @see ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR = 1,
+
+ /*
+ * <p>The camera device post-processing stages can be manually controlled.
+ * The camera device supports basic manual control of the image post-processing
+ * stages. This means the following controls are guaranteed to be supported:</p>
+ * <ul>
+ * <li>
+ * <p>Manual tonemap control</p>
+ * <ul>
+ * <li>android.tonemap.curve</li>
+ * <li>ACAMERA_TONEMAP_MODE</li>
+ * <li>ACAMERA_TONEMAP_MAX_CURVE_POINTS</li>
+ * <li>ACAMERA_TONEMAP_GAMMA</li>
+ * <li>ACAMERA_TONEMAP_PRESET_CURVE</li>
+ * </ul>
+ * </li>
+ * <li>
+ * <p>Manual white balance control</p>
+ * <ul>
+ * <li>ACAMERA_COLOR_CORRECTION_TRANSFORM</li>
+ * <li>ACAMERA_COLOR_CORRECTION_GAINS</li>
+ * </ul>
+ * </li>
+ * <li>Manual lens shading map control<ul>
+ * <li>ACAMERA_SHADING_MODE</li>
+ * <li>ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE</li>
+ * <li>ACAMERA_STATISTICS_LENS_SHADING_MAP</li>
+ * <li>ACAMERA_LENS_INFO_SHADING_MAP_SIZE</li>
+ * </ul>
+ * </li>
+ * <li>Manual aberration correction control (if aberration correction is supported)<ul>
+ * <li>ACAMERA_COLOR_CORRECTION_ABERRATION_MODE</li>
+ * <li>ACAMERA_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES</li>
+ * </ul>
+ * </li>
+ * <li>Auto white balance lock<ul>
+ * <li>ACAMERA_CONTROL_AWB_LOCK</li>
+ * </ul>
+ * </li>
+ * </ul>
+ * <p>If auto white balance is enabled, then the camera device
+ * will accurately report the values applied by AWB in the result.</p>
+ * <p>A given camera device may also support additional post-processing
+ * controls, but this capability only covers the above list of controls.</p>
+ *
+ * @see ACAMERA_COLOR_CORRECTION_ABERRATION_MODE
+ * @see ACAMERA_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES
+ * @see ACAMERA_COLOR_CORRECTION_GAINS
+ * @see ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * @see ACAMERA_CONTROL_AWB_LOCK
+ * @see ACAMERA_LENS_INFO_SHADING_MAP_SIZE
+ * @see ACAMERA_SHADING_MODE
+ * @see ACAMERA_STATISTICS_LENS_SHADING_MAP
+ * @see ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
+ * @see ACAMERA_TONEMAP_GAMMA
+ * @see ACAMERA_TONEMAP_MAX_CURVE_POINTS
+ * @see ACAMERA_TONEMAP_MODE
+ * @see ACAMERA_TONEMAP_PRESET_CURVE
+ */
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING = 2,
+
+ /*
+ * <p>The camera device supports outputting RAW buffers and
+ * metadata for interpreting them.</p>
+ * <p>Devices supporting the RAW capability allow both for
+ * saving DNG files, and for direct application processing of
+ * raw sensor images.</p>
+ * <ul>
+ * <li>RAW_SENSOR is supported as an output format.</li>
+ * <li>The maximum available resolution for RAW_SENSOR streams
+ * will match either the value in
+ * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE or
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.</li>
+ * <li>All DNG-related optional metadata entries are provided
+ * by the camera device.</li>
+ * </ul>
+ *
+ * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
+ * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+ */
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_RAW = 3,
- ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING = 4,
+
+ /*
+ * <p>The camera device supports accurately reporting the sensor settings for many of
+ * the sensor controls while the built-in 3A algorithm is running. This allows
+ * reporting of sensor settings even when these settings cannot be manually changed.</p>
+ * <p>The values reported for the following controls are guaranteed to be available
+ * in the CaptureResult, including when 3A is enabled:</p>
+ * <ul>
+ * <li>Exposure control<ul>
+ * <li>ACAMERA_SENSOR_EXPOSURE_TIME</li>
+ * </ul>
+ * </li>
+ * <li>Sensitivity control<ul>
+ * <li>ACAMERA_SENSOR_SENSITIVITY</li>
+ * </ul>
+ * </li>
+ * <li>Lens controls (if the lens is adjustable)<ul>
+ * <li>ACAMERA_LENS_FOCUS_DISTANCE</li>
+ * <li>ACAMERA_LENS_APERTURE</li>
+ * </ul>
+ * </li>
+ * </ul>
+ * <p>This capability is a subset of the MANUAL_SENSOR control capability, and will
+ * always be included if the MANUAL_SENSOR capability is available.</p>
+ *
+ * @see ACAMERA_LENS_APERTURE
+ * @see ACAMERA_LENS_FOCUS_DISTANCE
+ * @see ACAMERA_SENSOR_EXPOSURE_TIME
+ * @see ACAMERA_SENSOR_SENSITIVITY
+ */
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS = 5,
+
+ /*
+ * <p>The camera device supports capturing high-resolution images at >= 20 frames per
+ * second, in at least the uncompressed YUV format, when post-processing settings are set
+ * to FAST. Additionally, maximum-resolution images can be captured at >= 10 frames
+ * per second. Here, 'high resolution' means at least 8 megapixels, or the maximum
+ * resolution of the device, whichever is smaller.</p>
+ * <p>More specifically, this means that a size matching the camera device's active array
+ * size is listed as a supported size for the {@link
+ * android.graphics.ImageFormat#YUV_420_888} format in either {@link
+ * android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} or {@link
+ * android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
+ * with a minimum frame duration for that format and size of either <= 1/20 s, or
+ * <= 1/10 s, respectively; and the ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES entry
+ * lists at least one FPS range where the minimum FPS is >= 1 / minimumFrameDuration
+ * for the maximum-size YUV_420_888 format. If that maximum size is listed in {@link
+ * android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
+ * then the list of resolutions for YUV_420_888 from {@link
+ * android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} contains at
+ * least one resolution >= 8 megapixels, with a minimum frame duration of <= 1/20
+ * s.</p>
+ * <p>If the device supports the {@link android.graphics.ImageFormat#RAW10} or {@link
+ * android.graphics.ImageFormat#RAW12} formats, then those can also be captured at the
+ * same rate as the maximum-size YUV_420_888 resolution.</p>
+ * <p>If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees
+ * as for the YUV_420_888 format also apply to the {@link
+ * android.graphics.ImageFormat#PRIVATE} format.</p>
+ * <p>In addition, the ACAMERA_SYNC_MAX_LATENCY field is guaranteed to have a value between 0
+ * and 4, inclusive. ACAMERA_CONTROL_AE_LOCK_AVAILABLE and ACAMERA_CONTROL_AWB_LOCK_AVAILABLE
+ * are also guaranteed to be <code>true</code> so burst capture with these two locks ON yields
+ * consistent image output.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES
+ * @see ACAMERA_CONTROL_AE_LOCK_AVAILABLE
+ * @see ACAMERA_CONTROL_AWB_LOCK_AVAILABLE
+ * @see ACAMERA_SYNC_MAX_LATENCY
+ */
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE = 6,
- ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING = 7,
+
+ /*
+ * <p>The camera device can produce depth measurements from its field of view.</p>
+ * <p>This capability requires the camera device to support the following:</p>
+ * <ul>
+ * <li>{@link android.graphics.ImageFormat#DEPTH16} is supported as an output format.</li>
+ * <li>{@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD} is optionally supported as an
+ * output format.</li>
+ * <li>This camera device, and all camera devices with the same ACAMERA_LENS_FACING,
+ * will list the following calibration entries in both
+ * {@link android.hardware.camera2.CameraCharacteristics} and
+ * {@link android.hardware.camera2.CaptureResult}:<ul>
+ * <li>ACAMERA_LENS_POSE_TRANSLATION</li>
+ * <li>ACAMERA_LENS_POSE_ROTATION</li>
+ * <li>ACAMERA_LENS_INTRINSIC_CALIBRATION</li>
+ * <li>ACAMERA_LENS_RADIAL_DISTORTION</li>
+ * </ul>
+ * </li>
+ * <li>The ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE entry is listed by this device.</li>
+ * <li>A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
+ * normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
+ * format.</li>
+ * </ul>
+ * <p>Generally, depth output operates at a slower frame rate than standard color capture,
+ * so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
+ * should be accounted for (see
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}).
+ * On a device that supports both depth and color-based output, to enable smooth preview,
+ * using a repeating burst is recommended, where a depth-output target is only included
+ * once every N frames, where N is the ratio between preview output rate and depth output
+ * rate, including depth stall time.</p>
+ *
+ * @see ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE
+ * @see ACAMERA_LENS_FACING
+ * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ * @see ACAMERA_LENS_RADIAL_DISTORTION
+ */
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT = 8,
- ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
- = 9,
+
} acamera_metadata_enum_android_request_available_capabilities_t;
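
A sketch of gating a feature on one of these capability flags (assuming a valid ACameraManager and camera ID):

#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadataTags.h>

// Sketch: check whether a camera advertises MANUAL_SENSOR before relying on it.
static bool supportsManualSensor(ACameraManager* mgr, const char* cameraId) {
    ACameraMetadata* chars = nullptr;
    if (ACameraManager_getCameraCharacteristics(mgr, cameraId, &chars) != ACAMERA_OK) {
        return false;
    }
    bool found = false;
    ACameraMetadata_const_entry caps;
    if (ACameraMetadata_getConstEntry(
            chars, ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, &caps) == ACAMERA_OK) {
        for (uint32_t i = 0; i < caps.count; i++) {
            if (caps.data.u8[i] == ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
                found = true;
                break;
            }
        }
    }
    ACameraMetadata_free(chars);
    return found;
}
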
-// ACAMERA_SCALER_AVAILABLE_FORMATS
-typedef enum acamera_metadata_enum_acamera_scaler_available_formats {
- ACAMERA_SCALER_AVAILABLE_FORMATS_RAW16 = 0x20,
- ACAMERA_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE = 0x24,
- ACAMERA_SCALER_AVAILABLE_FORMATS_YV12 = 0x32315659,
- ACAMERA_SCALER_AVAILABLE_FORMATS_YCrCb_420_SP = 0x11,
- ACAMERA_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED = 0x22,
- ACAMERA_SCALER_AVAILABLE_FORMATS_YCbCr_420_888 = 0x23,
- ACAMERA_SCALER_AVAILABLE_FORMATS_BLOB = 0x21,
-} acamera_metadata_enum_android_scaler_available_formats_t;
-
// ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
typedef enum acamera_metadata_enum_acamera_scaler_available_stream_configurations {
ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT = 0,
+
ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT = 1,
+
} acamera_metadata_enum_android_scaler_available_stream_configurations_t;
// ACAMERA_SCALER_CROPPING_TYPE
typedef enum acamera_metadata_enum_acamera_scaler_cropping_type {
+ /*
+ * <p>The camera device only supports centered crop regions.</p>
+ */
ACAMERA_SCALER_CROPPING_TYPE_CENTER_ONLY = 0,
+
+ /*
+ * <p>The camera device supports arbitrarily chosen crop regions.</p>
+ */
ACAMERA_SCALER_CROPPING_TYPE_FREEFORM = 1,
+
} acamera_metadata_enum_android_scaler_cropping_type_t;
// ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT = 1,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT = 2,
+
+ /*
+ * <p>Incandescent light</p>
+ */
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN = 3,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_FLASH = 4,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER = 9,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER = 10,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_SHADE = 11,
+
+ /*
+ * <p>D 5700 - 7100K</p>
+ */
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT = 12,
+
+ /*
+ * <p>N 4600 - 5400K</p>
+ */
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT = 13,
+
+ /*
+ * <p>W 3900 - 4500K</p>
+ */
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT = 14,
+
+ /*
+ * <p>WW 3200 - 3700K</p>
+ */
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT = 15,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A = 17,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B = 18,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C = 19,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_D55 = 20,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_D65 = 21,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_D75 = 22,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_D50 = 23,
+
ACAMERA_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN = 24,
+
} acamera_metadata_enum_android_sensor_reference_illuminant1_t;
// ACAMERA_SENSOR_TEST_PATTERN_MODE
typedef enum acamera_metadata_enum_acamera_sensor_test_pattern_mode {
+ /*
+ * <p>No test pattern mode is used, and the camera
+ * device returns captures from the image sensor.</p>
+ * <p>This is the default if the key is not set.</p>
+ */
ACAMERA_SENSOR_TEST_PATTERN_MODE_OFF = 0,
+
+ /*
+ * <p>Each pixel in <code>[R, G_even, G_odd, B]</code> is replaced by its
+ * respective color channel provided in
+ * ACAMERA_SENSOR_TEST_PATTERN_DATA.</p>
+ * <p>For example:</p>
+ * <pre><code>android.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
+ * </code></pre>
+ * <p>All green pixels are 100% green. All red/blue pixels are black.</p>
+ * <pre><code>android.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
+ * </code></pre>
+ * <p>All red pixels are 100% red. Only the odd green pixels
+ * are 100% green. All blue pixels are 100% black.</p>
+ *
+ * @see ACAMERA_SENSOR_TEST_PATTERN_DATA
+ */
ACAMERA_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR = 1,
+
+ /*
+ * <p>All pixel data is replaced with an 8-bar color pattern.</p>
+ * <p>The vertical bars (left-to-right) are as follows:</p>
+ * <ul>
+ * <li>100% white</li>
+ * <li>yellow</li>
+ * <li>cyan</li>
+ * <li>green</li>
+ * <li>magenta</li>
+ * <li>red</li>
+ * <li>blue</li>
+ * <li>black</li>
+ * </ul>
+ * <p>In general the image would look like the following:</p>
+ * <pre><code>W Y C G M R B K
+ * W Y C G M R B K
+ * W Y C G M R B K
+ * W Y C G M R B K
+ * W Y C G M R B K
+ * . . . . . . . .
+ * . . . . . . . .
+ * . . . . . . . .
+ *
+ * (B = Blue, K = Black)
+ * </code></pre>
+ * <p>Each bar should take up 1/8 of the sensor pixel array width.
+ * When this is not possible, the bar size should be rounded
+ * down to the nearest integer and the pattern can repeat
+ * on the right side.</p>
+ * <p>Each bar's height must always take up the full sensor
+ * pixel array height.</p>
+ * <p>Each pixel in this test pattern must be set to either
+ * 0% intensity or 100% intensity.</p>
+ */
ACAMERA_SENSOR_TEST_PATTERN_MODE_COLOR_BARS = 2,
+
+ /*
+ * <p>The test pattern is similar to COLOR_BARS, except that
+ * each bar should start at its specified color at the top,
+ * and fade to gray at the bottom.</p>
+ * <p>Furthermore each bar is further subdivided into a left and
+ * right half. The left half should have a smooth gradient,
+ * and the right half should have a quantized gradient.</p>
+ * <p>In particular, the right half should consist of blocks of the
+ * same color, each spanning 1/16th of the active sensor pixel array width.</p>
+ * <p>The least significant bits in the quantized gradient should
+ * be copied from the most significant bits of the smooth gradient.</p>
+ * <p>The height of each bar should always be a multiple of 128.
+ * When this is not the case, the pattern should repeat at the bottom
+ * of the image.</p>
+ */
ACAMERA_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY = 3,
+
+ /*
+ * <p>All pixel data is replaced by a pseudo-random sequence
+ * generated from a PN9 512-bit sequence (typically implemented
+ * in hardware with a linear feedback shift register).</p>
+ * <p>The generator should be reset at the beginning of each frame,
+ * and thus each subsequent raw frame with this test pattern should
+ * be exactly the same as the last.</p>
+ */
ACAMERA_SENSOR_TEST_PATTERN_MODE_PN9 = 4,
+
+ /*
+ * <p>The first custom test pattern. All custom patterns that are
+ * available only on this camera device are at least this numeric
+ * value.</p>
+ * <p>All of the custom test patterns will be static
+ * (that is the raw image must not vary from frame to frame).</p>
+ */
ACAMERA_SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 256,
+
} acamera_metadata_enum_android_sensor_test_pattern_mode_t;
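
For illustration, a sketch of selecting one of these patterns; the test pattern mode is a per-request int32 control:

#include <camera/NdkCaptureRequest.h>
#include <camera/NdkCameraMetadataTags.h>

// Sketch: switch the sensor to the color-bar test pattern for this request.
static camera_status_t requestColorBars(ACaptureRequest* request) {
    int32_t mode = ACAMERA_SENSOR_TEST_PATTERN_MODE_COLOR_BARS;
    return ACaptureRequest_setEntry_i32(
            request, ACAMERA_SENSOR_TEST_PATTERN_MODE, /*count*/ 1, &mode);
}
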
// ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
typedef enum acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement {
ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB = 0,
+
ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG = 1,
+
ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG = 2,
+
ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR = 3,
+
+ /*
+ * <p>Sensor is not Bayer; output has 3 16-bit
+ * values for each pixel, instead of just 1 16-bit value
+ * per pixel.</p>
+ */
ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB = 4,
+
} acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t;
// ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
typedef enum acamera_metadata_enum_acamera_sensor_info_timestamp_source {
+ /*
+ * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in nanoseconds and monotonic,
+ * but cannot be compared to timestamps from other subsystems
+ * (e.g. accelerometer, gyro etc.), or other instances of the same or different
+ * camera devices in the same system. Timestamps between streams and results for
+ * a single camera instance are comparable, and the timestamps for all buffers
+ * and the result metadata generated by a single capture are identical.</p>
+ *
+ * @see ACAMERA_SENSOR_TIMESTAMP
+ */
ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN = 0,
+
+ /*
+ * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in the same timebase as
+ * {@link android.os.SystemClock#elapsedRealtimeNanos},
+ * and they can be compared to other timestamps using that base.</p>
+ *
+ * @see ACAMERA_SENSOR_TIMESTAMP
+ */
ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME = 1,
+
} acamera_metadata_enum_android_sensor_info_timestamp_source_t;
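
Since the REALTIME source shares the elapsedRealtimeNanos timebase (CLOCK_BOOTTIME), a sketch of computing capture-to-now latency from a result:

#include <time.h>
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>

// Sketch: capture-to-now latency; only meaningful when
// ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE is REALTIME.
static int64_t captureLatencyNs(const ACameraMetadata* result) {
    ACameraMetadata_const_entry ts;
    if (ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_TIMESTAMP, &ts) != ACAMERA_OK) {
        return -1;
    }
    struct timespec now;
    clock_gettime(CLOCK_BOOTTIME, &now);  // same timebase as elapsedRealtimeNanos
    return int64_t(now.tv_sec) * 1000000000LL + now.tv_nsec - ts.data.i64[0];
}
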
// ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED
typedef enum acamera_metadata_enum_acamera_sensor_info_lens_shading_applied {
ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED_FALSE = 0,
+
ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED_TRUE = 1,
+
} acamera_metadata_enum_android_sensor_info_lens_shading_applied_t;
// ACAMERA_SHADING_MODE
typedef enum acamera_metadata_enum_acamera_shading_mode {
+ /*
+ * <p>No lens shading correction is applied.</p>
+ */
ACAMERA_SHADING_MODE_OFF = 0,
+
+ /*
+ * <p>Apply lens shading corrections, without slowing
+ * frame rate relative to sensor raw output.</p>
+ */
ACAMERA_SHADING_MODE_FAST = 1,
+
+ /*
+ * <p>Apply high-quality lens shading correction, at the
+ * cost of possibly reduced frame rate.</p>
+ */
ACAMERA_SHADING_MODE_HIGH_QUALITY = 2,
+
} acamera_metadata_enum_android_shading_mode_t;
// ACAMERA_STATISTICS_FACE_DETECT_MODE
typedef enum acamera_metadata_enum_acamera_statistics_face_detect_mode {
+ /*
+ * <p>Do not include face detection statistics in capture
+ * results.</p>
+ */
ACAMERA_STATISTICS_FACE_DETECT_MODE_OFF = 0,
+
+ /*
+ * <p>Return face rectangle and confidence values only.</p>
+ */
ACAMERA_STATISTICS_FACE_DETECT_MODE_SIMPLE = 1,
+
+ /*
+ * <p>Return all face
+ * metadata.</p>
+ * <p>In this mode, face rectangles, scores, landmarks, and face IDs are all valid.</p>
+ */
ACAMERA_STATISTICS_FACE_DETECT_MODE_FULL = 2,
+
} acamera_metadata_enum_android_statistics_face_detect_mode_t;
// ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE
typedef enum acamera_metadata_enum_acamera_statistics_hot_pixel_map_mode {
+ /*
+ * <p>Hot pixel map production is disabled.</p>
+ */
ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE_OFF = 0,
+
+ /*
+ * <p>Hot pixel map production is enabled.</p>
+ */
ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE_ON = 1,
+
} acamera_metadata_enum_android_statistics_hot_pixel_map_mode_t;
// ACAMERA_STATISTICS_SCENE_FLICKER
typedef enum acamera_metadata_enum_acamera_statistics_scene_flicker {
+ /*
+ * <p>The camera device does not detect any flickering illumination
+ * in the current scene.</p>
+ */
ACAMERA_STATISTICS_SCENE_FLICKER_NONE = 0,
+
+ /*
+ * <p>The camera device detects illumination flickering at 50Hz
+ * in the current scene.</p>
+ */
ACAMERA_STATISTICS_SCENE_FLICKER_50HZ = 1,
+
+ /*
+ * <p>The camera device detects illumination flickering at 60Hz
+ * in the current scene.</p>
+ */
ACAMERA_STATISTICS_SCENE_FLICKER_60HZ = 2,
+
} acamera_metadata_enum_android_statistics_scene_flicker_t;
// ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
typedef enum acamera_metadata_enum_acamera_statistics_lens_shading_map_mode {
+ /*
+ * <p>Do not include a lens shading map in the capture result.</p>
+ */
ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE_OFF = 0,
+
+ /*
+ * <p>Include a lens shading map in the capture result.</p>
+ */
ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE_ON = 1,
+
} acamera_metadata_enum_android_statistics_lens_shading_map_mode_t;
// ACAMERA_TONEMAP_MODE
typedef enum acamera_metadata_enum_acamera_tonemap_mode {
+ /*
+ * <p>Use the tone mapping curve specified in
+ * the ACAMERA_TONEMAP_CURVE_* entries.</p>
+ * <p>All color enhancement and tonemapping must be disabled, except
+ * for applying the tonemapping curve specified by
+ * android.tonemap.curve.</p>
+ * <p>Must not slow down frame rate relative to raw
+ * sensor output.</p>
+ */
ACAMERA_TONEMAP_MODE_CONTRAST_CURVE = 0,
+
+ /*
+ * <p>Advanced gamma mapping and color enhancement may be applied, without
+ * reducing frame rate compared to raw sensor output.</p>
+ */
ACAMERA_TONEMAP_MODE_FAST = 1,
+
+ /*
+ * <p>High-quality gamma mapping and color enhancement will be applied, at
+ * the cost of possibly reduced frame rate compared to raw sensor output.</p>
+ */
ACAMERA_TONEMAP_MODE_HIGH_QUALITY = 2,
+
+ /*
+ * <p>Use the gamma value specified in ACAMERA_TONEMAP_GAMMA to perform
+ * tonemapping.</p>
+ * <p>All color enhancement and tonemapping must be disabled, except
+ * for applying the tonemapping curve specified by ACAMERA_TONEMAP_GAMMA.</p>
+ * <p>Must not slow down frame rate relative to raw sensor output.</p>
+ *
+ * @see ACAMERA_TONEMAP_GAMMA
+ */
ACAMERA_TONEMAP_MODE_GAMMA_VALUE = 3,
+
+ /*
+ * <p>Use the preset tonemapping curve specified in
+ * ACAMERA_TONEMAP_PRESET_CURVE to perform tonemapping.</p>
+ * <p>All color enhancement and tonemapping must be disabled, except
+ * for applying the tonemapping curve specified by
+ * ACAMERA_TONEMAP_PRESET_CURVE.</p>
+ * <p>Must not slow down frame rate relative to raw sensor output.</p>
+ *
+ * @see ACAMERA_TONEMAP_PRESET_CURVE
+ */
ACAMERA_TONEMAP_MODE_PRESET_CURVE = 4,
+
} acamera_metadata_enum_android_tonemap_mode_t;
// ACAMERA_TONEMAP_PRESET_CURVE
typedef enum acamera_metadata_enum_acamera_tonemap_preset_curve {
+ /*
+ * <p>Tonemapping curve is defined by sRGB</p>
+ */
ACAMERA_TONEMAP_PRESET_CURVE_SRGB = 0,
+
+ /*
+ * <p>Tonemapping curve is defined by ITU-R BT.709</p>
+ */
ACAMERA_TONEMAP_PRESET_CURVE_REC709 = 1,
+
} acamera_metadata_enum_android_tonemap_preset_curve_t;
-// ACAMERA_LED_TRANSMIT
-typedef enum acamera_metadata_enum_acamera_led_transmit {
- ACAMERA_LED_TRANSMIT_OFF = 0,
- ACAMERA_LED_TRANSMIT_ON = 1,
-} acamera_metadata_enum_android_led_transmit_t;
-
-// ACAMERA_LED_AVAILABLE_LEDS
-typedef enum acamera_metadata_enum_acamera_led_available_leds {
- ACAMERA_LED_AVAILABLE_LEDS_TRANSMIT = 0,
-} acamera_metadata_enum_android_led_available_leds_t;
-
// ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL
typedef enum acamera_metadata_enum_acamera_info_supported_hardware_level {
+ /*
+ * <p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
+ * better.</p>
+ * <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
+ * {@link android.hardware.camera2.CameraDevice#createCaptureSession
+ * createCaptureSession} documentation are guaranteed to be supported.</p>
+ * <p>All <code>LIMITED</code> devices support the <code>BACKWARD_COMPATIBLE</code> capability, indicating basic
+ * support for color image capture. The only exception is that the device may
+ * alternatively support only the <code>DEPTH_OUTPUT</code> capability, if it can only output depth
+ * measurements and not color images.</p>
+ * <p><code>LIMITED</code> devices and above require the use of ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * to lock exposure metering (and calculate flash power, for cameras with flash) before
+ * capturing a high-quality still image.</p>
+ * <p>A <code>LIMITED</code> device that only lists the <code>BACKWARD_COMPATIBLE</code> capability is only
+ * required to support full-automatic operation and post-processing (<code>OFF</code> is not
+ * supported for ACAMERA_CONTROL_AE_MODE, ACAMERA_CONTROL_AF_MODE, or
+ * ACAMERA_CONTROL_AWB_MODE)</p>
+ * <p>Additional capabilities may optionally be supported by a <code>LIMITED</code>-level device, and
+ * can be checked for in ACAMERA_REQUEST_AVAILABLE_CAPABILITIES.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see ACAMERA_CONTROL_AF_MODE
+ * @see ACAMERA_CONTROL_AWB_MODE
+ * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ */
ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED = 0,
+
+ /*
+ * <p>This camera device is capable of supporting advanced imaging applications.</p>
+ * <p>The stream configurations listed in the <code>FULL</code>, <code>LEGACY</code> and <code>LIMITED</code> tables in the
+ * {@link android.hardware.camera2.CameraDevice#createCaptureSession
+ * createCaptureSession} documentation are guaranteed to be supported.</p>
+ * <p>A <code>FULL</code> device will support the following capabilities:</p>
+ * <ul>
+ * <li><code>BURST_CAPTURE</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * <code>BURST_CAPTURE</code>)</li>
+ * <li>Per frame control (ACAMERA_SYNC_MAX_LATENCY <code>==</code> PER_FRAME_CONTROL)</li>
+ * <li>Manual sensor control (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains <code>MANUAL_SENSOR</code>)</li>
+ * <li>Manual post-processing control (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * <code>MANUAL_POST_PROCESSING</code>)</li>
+ * <li>The required exposure time range defined in ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE</li>
+ * <li>The required maxFrameDuration defined in ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION</li>
+ * </ul>
+ * <p>Note:
+ * Prior to API level 23, FULL devices also supported arbitrary cropping regions
+ * (ACAMERA_SCALER_CROPPING_TYPE <code>== FREEFORM</code>); this requirement was relaxed in API level
+ * 23, and <code>FULL</code> devices may only support <code>CENTERED</code> cropping.</p>
+ *
+ * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ * @see ACAMERA_SCALER_CROPPING_TYPE
+ * @see ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE
+ * @see ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION
+ * @see ACAMERA_SYNC_MAX_LATENCY
+ */
ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_FULL = 1,
+
+ /*
+ * <p>This camera device is running in backward compatibility mode.</p>
+ * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the {@link
+ * android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
+ * documentation are supported.</p>
+ * <p>A <code>LEGACY</code> device does not support per-frame control, manual sensor control, manual
+ * post-processing, arbitrary cropping regions, and has relaxed performance constraints.
+ * No additional capabilities beyond <code>BACKWARD_COMPATIBLE</code> will ever be listed by a
+ * <code>LEGACY</code> device in ACAMERA_REQUEST_AVAILABLE_CAPABILITIES.</p>
+ * <p>In addition, the ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is not functional on <code>LEGACY</code>
+ * devices. Instead, every request that includes a JPEG-format output target is treated
+ * as triggering a still capture, internally executing a precapture trigger. This may
+ * fire the flash for flash power metering during precapture, and then fire the flash
+ * for the final capture, if a flash is available on the device and the AE mode is set to
+ * enable the flash.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ */
ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY = 2,
+
+ /*
+ * <p>This camera device is capable of YUV reprocessing and RAW data capture, in addition to
+ * FULL-level capabilities.</p>
+ * <p>The stream configurations listed in the <code>LEVEL_3</code>, <code>RAW</code>, <code>FULL</code>, <code>LEGACY</code> and
+ * <code>LIMITED</code> tables in the {@link
+ * android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
+ * documentation are guaranteed to be supported.</p>
+ * <p>The following additional capabilities are guaranteed to be supported:</p>
+ * <ul>
+ * <li><code>YUV_REPROCESSING</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * <code>YUV_REPROCESSING</code>)</li>
+ * <li><code>RAW</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * <code>RAW</code>)</li>
+ * </ul>
+ *
+ * @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ */
ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_3 = 3,
+
} acamera_metadata_enum_android_info_supported_hardware_level_t;
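
Note that these enum values are not ordered by capability (LEGACY == 2 yet is the weakest level), so comparisons should go through an explicit ranking; a sketch:

// Sketch: rank hardware levels before comparing them.
static int levelRank(uint8_t level) {
    switch (level) {
        case ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY:  return 0;
        case ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: return 1;
        case ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_FULL:    return 2;
        case ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_3:       return 3;
        default:                                            return -1;  // unknown future level
    }
}

static bool isAtLeastFull(uint8_t level) {
    return levelRank(level) >= levelRank(ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
}
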
// ACAMERA_BLACK_LEVEL_LOCK
typedef enum acamera_metadata_enum_acamera_black_level_lock {
ACAMERA_BLACK_LEVEL_LOCK_OFF = 0,
+
ACAMERA_BLACK_LEVEL_LOCK_ON = 1,
+
} acamera_metadata_enum_android_black_level_lock_t;
// ACAMERA_SYNC_FRAME_NUMBER
typedef enum acamera_metadata_enum_acamera_sync_frame_number {
+ /*
+ * <p>The current result is not yet fully synchronized to any request.</p>
+ * <p>Synchronization is in progress, and reading metadata from this
+ * result may include a mix of data that have taken effect since the
+ * last synchronization time.</p>
+ * <p>In some future result, within ACAMERA_SYNC_MAX_LATENCY frames,
+ * this value will update to the actual frame number
+ * the result is guaranteed to be synchronized to (as long as the
+ * request settings remain constant).</p>
+ *
+ * @see ACAMERA_SYNC_MAX_LATENCY
+ */
ACAMERA_SYNC_FRAME_NUMBER_CONVERGING = -1,
+
+ /*
+ * <p>The current result's synchronization status is unknown.</p>
+ * <p>The result may have already converged, or it may be in
+ * progress. Reading from this result may include some mix
+ * of settings from past requests.</p>
+ * <p>After a settings change, the new settings will eventually all
+ * take effect for the output buffers and results. However, this
+ * value will not change when that happens. Altering settings
+ * rapidly may provide outcomes using mixes of settings from recent
+ * requests.</p>
+ * <p>This value is intended primarily for backwards compatibility with
+ * the older camera implementations (for android.hardware.Camera).</p>
+ */
ACAMERA_SYNC_FRAME_NUMBER_UNKNOWN = -2,
+
} acamera_metadata_enum_android_sync_frame_number_t;
// ACAMERA_SYNC_MAX_LATENCY
typedef enum acamera_metadata_enum_acamera_sync_max_latency {
+ /*
+ * <p>Every frame has the requests immediately applied.</p>
+ * <p>Changing controls over multiple requests one after another will
+ * produce results that have those controls applied atomically
+ * each frame.</p>
+ * <p>All FULL capability devices will have this as their maxLatency.</p>
+ */
ACAMERA_SYNC_MAX_LATENCY_PER_FRAME_CONTROL = 0,
+
+ /*
+ * <p>Each new frame has some subset (potentially the entire set)
+ * of the past requests applied to the camera settings.</p>
+ * <p>By submitting a series of identical requests, the camera device
+ * will eventually have the camera settings applied, but it is
+ * unknown when that exact point will be.</p>
+ * <p>All LEGACY capability devices will have this as their maxLatency.</p>
+ */
ACAMERA_SYNC_MAX_LATENCY_UNKNOWN = -1,
+
} acamera_metadata_enum_android_sync_max_latency_t;
@@ -977,13 +2731,17 @@
// ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS
typedef enum acamera_metadata_enum_acamera_depth_available_depth_stream_configurations {
ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT = 0,
+
ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_INPUT = 1,
+
} acamera_metadata_enum_android_depth_available_depth_stream_configurations_t;
// ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE
typedef enum acamera_metadata_enum_acamera_depth_depth_is_exclusive {
ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE = 0,
+
ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE = 1,
+
} acamera_metadata_enum_android_depth_depth_is_exclusive_t;
diff --git a/include/media/IDataSource.h b/include/media/IDataSource.h
index 09009f0..838e29f 100644
--- a/include/media/IDataSource.h
+++ b/include/media/IDataSource.h
@@ -20,6 +20,7 @@
#include <binder/IInterface.h>
#include <media/stagefright/foundation/ABase.h>
#include <utils/Errors.h>
+#include <utils/String8.h>
namespace android {
@@ -44,6 +45,8 @@
// Get the flags of the source.
// Refer to DataSource:Flags for the definition of the flags.
virtual uint32_t getFlags() = 0;
+ // Get a description of the source, e.g. the URL or filename it is based on.
+ virtual String8 toString() = 0;
private:
DISALLOW_EVIL_CONSTRUCTORS(IDataSource);
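
Caller-side, the new method lets any holder of the interface describe a source in logs; a minimal sketch (the string round-trips over Binder via the TO_STRING transaction added further below; the log tag is hypothetical):

#define LOG_TAG "SourceDump"  // hypothetical tag for this sketch
#include <media/IDataSource.h>
#include <utils/Log.h>

// Sketch: describe a remote data source when logging.
static void logSourceDescription(const sp<IDataSource>& source) {
    if (source != NULL) {
        ALOGD("media data source: %s", source->toString().string());
    }
}
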
diff --git a/include/media/IMediaExtractor.h b/include/media/IMediaExtractor.h
index 9f7a719..d9fcd89 100644
--- a/include/media/IMediaExtractor.h
+++ b/include/media/IMediaExtractor.h
@@ -19,6 +19,7 @@
#define IMEDIA_EXTRACTOR_BASE_H_
#include <media/IMediaSource.h>
+#include <media/IDataSource.h>
namespace android {
@@ -69,6 +70,17 @@
uint32_t flags = 0);
};
+void registerMediaExtractor(
+ const sp<IMediaExtractor> &extractor,
+ const sp<IDataSource> &source,
+ const char *mime);
+
+void registerMediaSource(
+ const sp<IMediaExtractor> &extractor,
+ const sp<IMediaSource> &source);
+
+status_t dumpExtractors(int fd, const Vector<String16>& args);
+
} // namespace android
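
A hypothetical call site for these hooks; createExtractorFor stands in for whatever factory actually produces the extractor:

// Sketch: record a newly created extractor so dumpExtractors() can report it.
static sp<IMediaExtractor> createAndRegister(
        const sp<IDataSource>& source, const char* mime) {
    sp<IMediaExtractor> extractor = createExtractorFor(source, mime);  // assumed factory
    if (extractor != NULL) {
        registerMediaExtractor(extractor, source, mime);
    }
    return extractor;
}
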
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index fab92bd..b22e0b4 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -302,6 +302,7 @@
bool mTunneled;
OMX_INDEXTYPE mDescribeColorAspectsIndex;
+ OMX_INDEXTYPE mDescribeHDRStaticInfoIndex;
status_t setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode);
status_t allocateBuffersOnPort(OMX_U32 portIndex);
@@ -421,6 +422,23 @@
// unspecified values.
void onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects);
+ // gets the DescribeHDRStaticInfo extension index, or sets it to 0 on error. Returns the codec error.
+ status_t initDescribeHDRStaticInfoIndex();
+
+ // sets HDR static information for the decoder based on |configFormat|, and
+ // sets the resulting HDRStaticInfo config into |outputFormat|. Returns the codec error.
+ status_t setHDRStaticInfoForVideoDecoder(
+ const sp<AMessage> &configFormat, sp<AMessage> &outputFormat);
+
+ // sets |params|. Returns the codec error.
+ status_t setHDRStaticInfo(const DescribeHDRStaticInfoParams &params);
+
+ // gets |params|. Returns the codec error.
+ status_t getHDRStaticInfo(DescribeHDRStaticInfoParams &params);
+
+ // gets HDR static information for the video decoder port and sets them into |format|.
+ status_t getHDRStaticInfoForVideoDecoder(sp<AMessage> &format);
+
typedef struct drcParams {
int32_t drcCut;
int32_t drcBoost;
diff --git a/include/media/stagefright/DataSource.h b/include/media/stagefright/DataSource.h
index c5e09c0..c5df1f6 100644
--- a/include/media/stagefright/DataSource.h
+++ b/include/media/stagefright/DataSource.h
@@ -78,6 +78,10 @@
return 0;
}
+ virtual String8 toString() {
+ return String8("<unspecified>");
+ }
+
virtual status_t reconnectAtOffset(off64_t offset) {
return ERROR_UNSUPPORTED;
}
diff --git a/include/media/stagefright/FileSource.h b/include/media/stagefright/FileSource.h
index 266168b..b6349e0 100644
--- a/include/media/stagefright/FileSource.h
+++ b/include/media/stagefright/FileSource.h
@@ -43,6 +43,10 @@
virtual void getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client);
+ virtual String8 toString() {
+ return mName;
+ }
+
protected:
virtual ~FileSource();
@@ -51,6 +55,7 @@
int64_t mOffset;
int64_t mLength;
Mutex mLock;
+ String8 mName;
/*for DRM*/
sp<DecryptHandle> mDecryptHandle;
diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h
index a9ae49b..4f7426d 100644
--- a/include/media/stagefright/MetaData.h
+++ b/include/media/stagefright/MetaData.h
@@ -258,6 +258,7 @@
bool hasData(uint32_t key) const;
+ String8 toString() const;
void dumpToLog() const;
status_t writeToParcel(Parcel &parcel);
@@ -278,7 +279,8 @@
void clear();
void setData(uint32_t type, const void *data, size_t size);
void getData(uint32_t *type, const void **data, size_t *size) const;
- String8 asString() const;
+ // may include hexdump of binary data if verbose=true
+ String8 asString(bool verbose) const;
private:
uint32_t mType;
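
Usage sketch for the new dump helpers: toString() renders every item, and asString(true) additionally hex-dumps binary payloads:

// Sketch: log a track's full format via the new MetaData::toString().
static void logTrackFormat(const sp<IMediaSource>& track) {
    sp<MetaData> meta = track->getFormat();
    if (meta != NULL) {
        ALOGV("track format: %s", meta->toString().string());
    }
}
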
diff --git a/include/media/stagefright/foundation/ColorUtils.h b/include/media/stagefright/foundation/ColorUtils.h
index f01a210..2368b82 100644
--- a/include/media/stagefright/foundation/ColorUtils.h
+++ b/include/media/stagefright/foundation/ColorUtils.h
@@ -169,6 +169,13 @@
// written.
static void setColorAspectsIntoFormat(
const ColorAspects &aspects, sp<AMessage> &format, bool force = false);
+
+ // finds HDR metadata in |format| as HDRStaticInfo, defaulting missing fields to 0.
+ // Returns |true| if HDR metadata was found in the format; otherwise, returns |false|.
+ static bool getHDRStaticInfoFromFormat(const sp<AMessage> &format, HDRStaticInfo *info);
+
+ // writes |info| into format.
+ static void setHDRStaticInfoIntoFormat(const HDRStaticInfo &info, sp<AMessage> &format);
};
inline static const char *asString(android::ColorUtils::ColorStandard i, const char *def = "??") {
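
The two helpers are intended to be used as a pair, as the ACodec changes below do; a condensed sketch:

// Sketch: propagate HDR static info from a configure format to an output format.
static void copyHDRStaticInfo(
        const sp<AMessage>& configFormat, sp<AMessage>& outputFormat) {
    HDRStaticInfo info;
    if (ColorUtils::getHDRStaticInfoFromFormat(configFormat, &info)) {
        ColorUtils::setHDRStaticInfoIntoFormat(info, outputFormat);
    }
}
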
diff --git a/include/media/stagefright/foundation/Mutexed.h b/include/media/stagefright/foundation/Mutexed.h
index e905d86..143b140 100644
--- a/include/media/stagefright/foundation/Mutexed.h
+++ b/include/media/stagefright/foundation/Mutexed.h
@@ -103,6 +103,10 @@
class Locked {
public:
inline Locked(Mutexed<T> &mParent);
+        inline Locked(Locked &&from) :
+            mLock(from.mLock),
+            mTreasure(from.mTreasure),
+            mLocked(from.mLocked) {
+            // disarm the moved-from guard so the mutex is unlocked only once
+            from.mLocked = false;
+        }
inline ~Locked();
// dereference the protected structure. This returns nullptr if the
@@ -148,9 +152,9 @@
// Lock the mutex, and create an accessor-guard (a Locked object) to access the underlying
// structure. This returns an object that dereferences to the wrapped structure when the mutex
// is locked by it, or otherwise to "null".
- inline Locked&& lock() {
- // use rvalue as Locked has no copy constructor
- return std::move(Locked(*this));
+ // This is just a shorthand for Locked() constructor to avoid specifying the template type.
+ inline Locked lock() {
+ return Locked(*this);
}
private:
@@ -169,7 +173,6 @@
mTreasure(mParent.mTreasure),
mLocked(true) {
mLock.lock();
-
}
template<typename T>
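
With lock() now returning the guard by value (and the guard movable), typical usage is scope-based; a sketch assuming a small protected struct:

// Sketch: scoped access to a Mutexed<T>-protected structure.
struct Counters {
    int frames = 0;
};
Mutexed<Counters> gCounters;

static void countFrame() {
    Mutexed<Counters>::Locked counters = gCounters.lock();
    counters->frames++;
}  // mutex released when the guard goes out of scope
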
diff --git a/media/libmedia/IDataSource.cpp b/media/libmedia/IDataSource.cpp
index ac864a4..7aeba5a 100644
--- a/media/libmedia/IDataSource.cpp
+++ b/media/libmedia/IDataSource.cpp
@@ -33,6 +33,7 @@
GET_SIZE,
CLOSE,
GET_FLAGS,
+ TO_STRING,
};
struct BpDataSource : public BpInterface<IDataSource> {
@@ -76,6 +77,13 @@
remote()->transact(GET_FLAGS, data, &reply);
return reply.readUint32();
}
+
+ virtual String8 toString() {
+ Parcel data, reply;
+ data.writeInterfaceToken(IDataSource::getInterfaceDescriptor());
+ remote()->transact(TO_STRING, data, &reply);
+ return reply.readString8();
+ }
};
IMPLEMENT_META_INTERFACE(DataSource, "android.media.IDataSource");
@@ -113,6 +121,12 @@
reply->writeUint32(getFlags());
return NO_ERROR;
} break;
+ case TO_STRING: {
+ CHECK_INTERFACE(IDataSource, data, reply);
+ reply->writeString8(toString());
+ return NO_ERROR;
+ } break;
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/media/libmedia/IMediaExtractor.cpp b/media/libmedia/IMediaExtractor.cpp
index 76d5648..b13b69f 100644
--- a/media/libmedia/IMediaExtractor.cpp
+++ b/media/libmedia/IMediaExtractor.cpp
@@ -21,6 +21,7 @@
#include <stdint.h>
#include <sys/types.h>
+#include <binder/IPCThreadState.h>
#include <binder/Parcel.h>
#include <media/IMediaExtractor.h>
#include <media/stagefright/MetaData.h>
@@ -145,7 +146,9 @@
CHECK_INTERFACE(IMediaExtractor, data, reply);
uint32_t idx;
if (data.readUint32(&idx) == NO_ERROR) {
- return reply->writeStrongBinder(IInterface::asBinder(getTrack((size_t(idx)))));
+ const sp<IMediaSource> track = getTrack(size_t(idx));
+ registerMediaSource(this, track);
+ return reply->writeStrongBinder(IInterface::asBinder(track));
}
return UNKNOWN_ERROR;
}
@@ -177,6 +180,90 @@
}
}
+struct ExtractorInstance {
+    String8 mime;
+    String8 name;
+    String8 sourceDescription;
+    pid_t owner;
+    wp<IMediaExtractor> extractor;
+    Vector<wp<IMediaSource>> tracks;
+    Vector<String8> trackDescriptions;
+    String8 toString() const;
+};
+
+String8 ExtractorInstance::toString() const {
+ String8 str = name;
+ str.append(" for mime ");
+ str.append(mime);
+ str.append(", source ");
+ str.append(sourceDescription);
+ str.append(String8::format(", pid %d: ", owner));
+ if (extractor.promote() == NULL) {
+ str.append("deleted\n");
+ } else {
+ str.append("active\n");
+ }
+ for (size_t i = 0; i < tracks.size(); i++) {
+ const String8 desc = trackDescriptions.itemAt(i);
+ str.appendFormat(" track {%s} ", desc.string());
+ const sp<IMediaSource> source = tracks.itemAt(i).promote();
+ if (source == NULL) {
+ str.append(": deleted\n");
+ } else {
+ str.appendFormat(": active\n");
+ }
+ }
+ return str;
+}
+
+static Vector<ExtractorInstance> extractors;
+
+void registerMediaSource(
+ const sp<IMediaExtractor> &ex,
+ const sp<IMediaSource> &source) {
+ for (size_t i = 0; i < extractors.size(); i++) {
+ ExtractorInstance &instance = extractors.editItemAt(i);
+ sp<IMediaExtractor> extractor = instance.extractor.promote();
+ if (extractor != NULL && extractor == ex) {
+            if (source == NULL || source->getFormat() == NULL) {
+                break;
+            }
+            if (instance.tracks.size() > 5) {
+                instance.tracks.resize(5);
+                instance.trackDescriptions.resize(5);
+            }
+            instance.tracks.push_front(source);
+            // insert at the front so descriptions stay index-aligned with tracks
+            instance.trackDescriptions.insertAt(source->getFormat()->toString(), 0);
+ break;
+ }
+ }
+}
+
+void registerMediaExtractor(
+ const sp<IMediaExtractor> &extractor,
+ const sp<IDataSource> &source,
+ const char *mime) {
+ ExtractorInstance ex;
+ ex.mime = mime == NULL ? "NULL" : mime;
+ ex.name = extractor->name();
+ ex.sourceDescription = source->toString();
+ ex.owner = IPCThreadState::self()->getCallingPid();
+ ex.extractor = extractor;
+
+ if (extractors.size() > 10) {
+ extractors.resize(10);
+ }
+ extractors.push_front(ex);
+}
+
+status_t dumpExtractors(int fd, const Vector<String16>&) {
+ String8 out;
+ out.append("Recent extractors, most recent first:\n");
+ for (size_t i = 0; i < extractors.size(); i++) {
+ const ExtractorInstance &instance = extractors.itemAt(i);
+ out.append(" ");
+ out.append(instance.toString());
+ }
+ write(fd, out.string(), out.size());
+ return OK;
+}
+
} // namespace android
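
A hypothetical service hook: any binder service's dump() can forward to the new registry to surface this output in dumpsys:

// Sketch: append the recent-extractor registry to a service's dump() output.
static status_t dumpMediaState(int fd, const Vector<String16>& args) {
    // ... service-specific sections would go here ...
    return dumpExtractors(fd, args);
}
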
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 08ac941..81aafbe 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -1428,7 +1428,11 @@
if (err == OK) {
int64_t timeUs;
- CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
+ if (!mbuf->meta_data()->findInt64(kKeyTime, &timeUs)) {
+ mbuf->meta_data()->dumpToLog();
+ track->mPackets->signalEOS(ERROR_MALFORMED);
+ break;
+ }
if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
mAudioTimeUs = timeUs;
mBufferingMonitor->updateQueuedTime(true /* isAudio */, timeUs);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index aebe479..7c903ea 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -521,7 +521,8 @@
mTimePerCaptureUs(-1ll),
mCreateInputBuffersSuspended(false),
mTunneled(false),
- mDescribeColorAspectsIndex((OMX_INDEXTYPE)0) {
+ mDescribeColorAspectsIndex((OMX_INDEXTYPE)0),
+ mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) {
mUninitializedState = new UninitializedState(this);
mLoadedState = new LoadedState(this);
mLoadedToIdleState = new LoadedToIdleState(this);
@@ -721,7 +722,7 @@
if (storingMetadataInDecodedBuffers()
&& !mLegacyAdaptiveExperiment
&& info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
- ALOGV("skipping buffer %p", info.mGraphicBuffer->getNativeBuffer());
+ ALOGV("skipping buffer");
continue;
}
ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());
@@ -3186,6 +3187,15 @@
if (err == ERROR_UNSUPPORTED) { // support is optional
err = OK;
}
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = setHDRStaticInfoForVideoDecoder(msg, outputFormat);
+ if (err == ERROR_UNSUPPORTED) { // support is optional
+ err = OK;
+ }
return err;
}
@@ -3216,7 +3226,7 @@
}
ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
- "[%s] getting color aspects failed even though codec advertises support",
+ "[%s] setting color aspects failed even though codec advertises support",
mComponentName.c_str());
return err;
}
@@ -3434,6 +3444,78 @@
return err;
}
+status_t ACodec::getHDRStaticInfoForVideoDecoder(sp<AMessage> &format) {
+ DescribeHDRStaticInfoParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ status_t err = getHDRStaticInfo(params);
+ if (err == OK) {
+ // we only set the decoder's output HDRStaticInfo if the codec supports it
+ setHDRStaticInfoIntoFormat(params.sInfo, format);
+ }
+ return err;
+}
+
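+// The HDR static info OMX extension is optional. If the codec does not publish
+// it, the index stays 0 and get/setHDRStaticInfo() below report ERROR_UNSUPPORTED.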
+status_t ACodec::initDescribeHDRStaticInfoIndex() {
+ status_t err = mOMX->getExtensionIndex(
+ mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
+ if (err != OK) {
+ mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
+ }
+ return err;
+}
+
+status_t ACodec::setHDRStaticInfoForVideoDecoder(
+ const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
+ DescribeHDRStaticInfoParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ HDRStaticInfo *info = &params.sInfo;
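+ // If the configure() format carried HDR static info, mirror it into the
+ // output format so the client can read back the same values.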
+ if (getHDRStaticInfoFromFormat(configFormat, info)) {
+ setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
+ }
+
+ (void)initDescribeHDRStaticInfoIndex();
+
+ // communicate the HDR static info to the codec
+ return setHDRStaticInfo(params);
+}
+
+status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
+ status_t err = ERROR_UNSUPPORTED;
+ if (mDescribeHDRStaticInfoIndex) {
+ err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
+ }
+
+ const HDRStaticInfo *info = &params.sInfo;
+ ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u %u, W: %u %u, "
+ "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
+ mComponentName.c_str(),
+ info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y,
+ info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
+ info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
+ info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);
+
+ ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
+ "[%s] setting HDRStaticInfo failed even though codec advertises support",
+ mComponentName.c_str());
+ return err;
+}
+
+status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams &params) {
+ status_t err = ERROR_UNSUPPORTED;
+ if (mDescribeHDRStaticInfoIndex) {
+ err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
+ }
+
+ ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
+ "[%s] getting HDRStaticInfo failed even though codec advertises support",
+ mComponentName.c_str());
+ return err;
+}
+
status_t ACodec::setupVideoEncoder(
const char *mime, const sp<AMessage> &msg,
sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
@@ -4647,6 +4729,7 @@
if (mUsingNativeWindow) {
notify->setInt32("android._dataspace", dataSpace);
}
+ (void)getHDRStaticInfoForVideoDecoder(notify);
} else {
(void)getInputColorAspectsForVideoEncoder(notify);
}
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index f28ac58..c8b61ca 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -55,8 +55,12 @@
: mStarted(false),
mSampleRate(sampleRate),
mOutSampleRate(outSampleRate > 0 ? outSampleRate : sampleRate),
+ mTrackMaxAmplitude(false),
+ mStartTimeUs(0),
+ mMaxAmplitude(0),
mPrevSampleTimeUs(0),
mFirstSampleTimeUs(-1ll),
+ mInitialReadTimeUs(0),
mNumFramesReceived(0),
mNumClientOwnedBuffers(0) {
ALOGV("sampleRate: %u, outSampleRate: %u, channelCount: %u",
diff --git a/media/libstagefright/CallbackDataSource.cpp b/media/libstagefright/CallbackDataSource.cpp
index bcbd78d..f14d34d 100644
--- a/media/libstagefright/CallbackDataSource.cpp
+++ b/media/libstagefright/CallbackDataSource.cpp
@@ -34,6 +34,8 @@
mIsClosed(false) {
// Set up the buffer to read into.
mMemory = mIDataSource->getIMemory();
+ mName = String8::format("CallbackDataSource(%s)", mIDataSource->toString().string());
+
}
CallbackDataSource::~CallbackDataSource() {
@@ -109,6 +111,7 @@
TinyCacheSource::TinyCacheSource(const sp<DataSource>& source)
: mSource(source), mCachedOffset(0), mCachedSize(0) {
+ mName = String8::format("TinyCacheSource(%s)", mSource->toString().string());
}
status_t TinyCacheSource::initCheck() const {
diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp
index 5e0baa4..5b92f91 100644
--- a/media/libstagefright/FileSource.cpp
+++ b/media/libstagefright/FileSource.cpp
@@ -33,12 +33,16 @@
: mFd(-1),
mOffset(0),
mLength(-1),
+ mName("<null>"),
mDecryptHandle(NULL),
mDrmManagerClient(NULL),
mDrmBufOffset(0),
mDrmBufSize(0),
mDrmBuf(NULL){
+ if (filename) {
+ mName = String8::format("FileSource(%s)", filename);
+ }
ALOGV("%s", filename);
mFd = open(filename, O_LARGEFILE | O_RDONLY);
@@ -53,16 +57,46 @@
: mFd(fd),
mOffset(offset),
mLength(length),
+ mName("<null>"),
mDecryptHandle(NULL),
mDrmManagerClient(NULL),
mDrmBufOffset(0),
mDrmBufSize(0),
- mDrmBuf(NULL){
+ mDrmBuf(NULL) {
ALOGV("fd=%d (%s), offset=%lld, length=%lld",
fd, nameForFd(fd).c_str(), (long long) offset, (long long) length);
- CHECK(offset >= 0);
- CHECK(length >= 0);
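+ // Clamp out-of-range offset/length values (and cap them to the actual file
+ // size below) instead of asserting: they may come from untrusted callers.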
+ if (mOffset < 0) {
+ mOffset = 0;
+ }
+ if (mLength < 0) {
+ mLength = 0;
+ }
+ if (mLength > INT64_MAX - mOffset) {
+ mLength = INT64_MAX - mOffset;
+ }
+ struct stat s;
+ if (fstat(fd, &s) == 0) {
+ if (mOffset > s.st_size) {
+ mOffset = s.st_size;
+ mLength = 0;
+ }
+ if (mOffset + mLength > s.st_size) {
+ mLength = s.st_size - mOffset;
+ }
+ }
+ if (mOffset != offset || mLength != length) {
+ ALOGW("offset/length adjusted from %lld/%lld to %lld/%lld",
+ (long long) offset, (long long) length,
+ (long long) mOffset, (long long) mLength);
+ }
+
+ mName = String8::format(
+ "FileSource(fd(%s), %lld, %lld)",
+ nameForFd(fd).c_str(),
+ (long long) mOffset,
+ (long long) mLength);
+
}
FileSource::~FileSource() {
diff --git a/media/libstagefright/HTTPBase.cpp b/media/libstagefright/HTTPBase.cpp
index 068a77f..0f24329 100644
--- a/media/libstagefright/HTTPBase.cpp
+++ b/media/libstagefright/HTTPBase.cpp
@@ -38,6 +38,7 @@
mPrevBandwidthMeasureTimeUs(0),
mPrevEstimatedBandWidthKbps(0),
mBandWidthCollectFreqMs(5000) {
+ mName = String8("HTTPBase(<disconnected>)");
}
void HTTPBase::addBandwidthMeasurement(
diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp
index 7240e1a..82e7a26 100644
--- a/media/libstagefright/MP3Extractor.cpp
+++ b/media/libstagefright/MP3Extractor.cpp
@@ -252,6 +252,7 @@
mDataSource(source),
mFirstFramePos(-1),
mFixedHeader(0) {
+
off64_t pos = 0;
off64_t post_id3_pos;
uint32_t header;
@@ -350,7 +351,13 @@
if (mSeeker == NULL || !mSeeker->getDuration(&durationUs)) {
off64_t fileSize;
if (mDataSource->getSize(&fileSize) == OK) {
- durationUs = 8000LL * (fileSize - mFirstFramePos) / bitrate;
+ off64_t dataLength = fileSize - mFirstFramePos;
+ if (dataLength > INT64_MAX / 8000LL) {
+ // duration would overflow
+ durationUs = INT64_MAX;
+ } else {
+ durationUs = 8000LL * dataLength / bitrate;
+ }
} else {
durationUs = -1;
}
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index fe66a58..1c6c882 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -87,10 +87,12 @@
virtual status_t getSize(off64_t* size);
virtual void close();
virtual uint32_t getFlags();
+ virtual String8 toString();
private:
sp<IMemory> mMemory;
sp<DataSource> mSource;
+ String8 mName;
RemoteDataSource(const sp<DataSource> &source);
DISALLOW_EVIL_CONSTRUCTORS(RemoteDataSource);
};
@@ -106,6 +108,7 @@
if (mMemory == NULL) {
ALOGE("Failed to allocate memory!");
}
+ mName = String8::format("RemoteDataSource(%s)", mSource->toString().string());
}
RemoteDataSource::~RemoteDataSource() {
close();
@@ -127,6 +130,10 @@
return mSource->flags();
}
+String8 RemoteDataSource::toString() {
+ return mName;
+}
+
// static
sp<IMediaExtractor> MediaExtractor::Create(
const sp<DataSource> &source, const char *mime) {
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index d6255d6..453db03 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -224,6 +224,8 @@
// So whenever we call DataSource::readAt it may end up in a call to
// IMediaHTTPConnection::readAt and therefore call back into JAVA.
mLooper->start(false /* runOnCallingThread */, true /* canCallJava */);
+
+ mName = String8::format("NuCachedSource2(%s)", mSource->toString().string());
}
NuCachedSource2::~NuCachedSource2() {
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index cc38a12..271c69b 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -55,7 +55,9 @@
}
mSelectedTracks.clear();
- mDataSource->close();
+ if (mDataSource != NULL) {
+ mDataSource->close();
+ }
}
status_t NuMediaExtractor::setDataSource(
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index 1fc5995..e88dfa8 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -198,6 +198,10 @@
mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, DRC_DEFAULT_MOBILE_ENC_LEVEL);
}
+ // By default, the decoder creates a 5.1 channel downmix signal for seven
+ // and eight channel input streams. Setting the maximum output channel
+ // count to -1 disables that downmix and enables 6.1 and 7.1 channel output.
+ aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1);
+
return status;
}
@@ -341,7 +345,7 @@
// for the following parameters of the OMX_AUDIO_PARAM_AACPROFILETYPE structure,
// a value of -1 implies the parameter is not set by the application:
- // nMaxOutputChannels uses default platform properties, see configureDownmix()
+ // nMaxOutputChannels is -1 by default (no limit on decoder output channels)
// nDrcCut uses default platform properties, see initDecoder()
// nDrcBoost idem
// nHeavyCompression idem
@@ -425,18 +429,6 @@
return mInputBufferCount > 0;
}
-void SoftAAC2::configureDownmix() const {
- char value[PROPERTY_VALUE_MAX];
- if (!(property_get("media.aac_51_output_enabled", value, NULL)
- && (!strcmp(value, "1") || !strcasecmp(value, "true")))) {
- ALOGI("limiting to stereo output");
- aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, 2);
- // By default, the decoder creates a 5.1 channel downmix signal
- // for seven and eight channel input streams. To enable 6.1 and 7.1 channel output
- // use aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1)
- }
-}
-
bool SoftAAC2::outputDelayRingBufferPutSamples(INT_PCM *samples, int32_t numSamples) {
if (numSamples == 0) {
return true;
@@ -571,7 +563,6 @@
notifyEmptyBufferDone(inHeader);
inHeader = NULL;
- configureDownmix();
// Only send out port settings changed event if both sample rate
// and numChannels are valid.
if (mStreamInfo->sampleRate && mStreamInfo->numChannels) {
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h
index c3e4459..a1cf285 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.h
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h
@@ -74,7 +74,6 @@
void initPorts();
status_t initDecoder();
bool isConfigured() const;
- void configureDownmix() const;
void drainDecoder();
// delay compensation
diff --git a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
index 3bdf93a..d59f129 100644
--- a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
+++ b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
@@ -1028,13 +1028,13 @@
vo_p1 = xn2;
for (i = 0; i < L_SUBFR/2; i++)
{
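+ /* accumulate via the saturating basic ops L_add()/L_sub() so the energy
+ difference cannot overflow the signed 32-bit accumulator */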
- L_tmp += *vo_p0 * *vo_p0;
+ L_tmp = L_add(L_tmp, *vo_p0 * *vo_p0);
vo_p0++;
- L_tmp -= *vo_p1 * *vo_p1;
+ L_tmp = L_sub(L_tmp, *vo_p1 * *vo_p1);
vo_p1++;
- L_tmp += *vo_p0 * *vo_p0;
+ L_tmp = L_add(L_tmp, *vo_p0 * *vo_p0);
vo_p0++;
- L_tmp -= *vo_p1 * *vo_p1;
+ L_tmp = L_sub(L_tmp, *vo_p1 * *vo_p1);
vo_p1++;
}
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
index 0c1a149..bb59ae4 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -229,6 +229,14 @@
int32_t bufferSize = inHeader->nFilledLen;
int32_t tmp = bufferSize;
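+ // A YUV 4:2:0 output frame occupies width * height * 3 / 2 bytes; reject
+ // output buffers that cannot hold a full frame.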
+ OMX_U32 frameSize = (mWidth * mHeight * 3) / 2;
+ if (outHeader->nAllocLen < frameSize) {
+ android_errorWriteLog(0x534e4554, "27833616");
+ ALOGE("Insufficient output buffer size");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+ return;
+ }
// The PV decoder is lying to us, sometimes it'll claim to only have
// consumed a subset of the buffer when it clearly consumed all of it.
// ignore whatever it says...
@@ -272,7 +280,7 @@
++mInputBufferCount;
outHeader->nOffset = 0;
- outHeader->nFilledLen = (mWidth * mHeight * 3) / 2;
+ outHeader->nFilledLen = frameSize;
List<BufferInfo *>::iterator it = outQueue.begin();
while ((*it)->mHeader != outHeader) {
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
index 9988015..4cde54e 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
@@ -263,6 +263,14 @@
mConfig->inputBufferUsedLength = 0;
mConfig->outputFrameSize = kOutputBufferSize / sizeof(int16_t);
+ if ((int32)outHeader->nAllocLen < mConfig->outputFrameSize) {
+ ALOGE("output buffer too small: got %u, expected %u",
+ outHeader->nAllocLen, mConfig->outputFrameSize);
+ android_errorWriteLog(0x534e4554, "27793371");
+ notify(OMX_EventError, OMX_ErrorUndefined, OUTPUT_BUFFER_TOO_SMALL, NULL);
+ mSignalledError = true;
+ return;
+ }
mConfig->pOutputBuffer =
reinterpret_cast<int16_t *>(outHeader->pBuffer);
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
index 6b8b395..2f61d12 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
@@ -202,7 +202,12 @@
}
if (mFirstPicture && !outQueue.empty()) {
- drainOneOutputBuffer(mFirstPictureId, mFirstPicture);
+ if (!drainOneOutputBuffer(mFirstPictureId, mFirstPicture)) {
+ ALOGE("Drain failed");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+ return;
+ }
delete[] mFirstPicture;
mFirstPicture = NULL;
mFirstPictureId = -1;
@@ -242,15 +247,20 @@
memcpy(mFirstPicture, data, pictureSize);
}
-void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
+bool SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
BufferInfo *outInfo = *outQueue.begin();
- outQueue.erase(outQueue.begin());
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
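+ // A YUV 4:2:0 frame needs width * height * 3 / 2 bytes starting at nOffset;
+ // bail out if the output buffer cannot hold it.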
+ OMX_U32 frameSize = mWidth * mHeight * 3 / 2;
+ if (outHeader->nAllocLen - outHeader->nOffset < frameSize) {
+ android_errorWriteLog(0x534e4554, "27833616");
+ return false;
+ }
+ outQueue.erase(outQueue.begin());
OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
outHeader->nTimeStamp = header->nTimeStamp;
outHeader->nFlags = header->nFlags;
- outHeader->nFilledLen = mWidth * mHeight * 3 / 2;
+ outHeader->nFilledLen = frameSize;
uint8_t *dst = outHeader->pBuffer + outHeader->nOffset;
const uint8_t *srcY = data;
@@ -265,6 +275,7 @@
delete header;
outInfo->mOwnedByUs = false;
notifyFillBufferDone(outHeader);
+ return true;
}
void SoftAVC::drainAllOutputBuffers(bool eos) {
@@ -277,7 +288,12 @@
mHandle, &decodedPicture, eos /* flush */)) {
int32_t picId = decodedPicture.picId;
uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture;
- drainOneOutputBuffer(picId, data);
+ if (!drainOneOutputBuffer(picId, data)) {
+ ALOGE("Drain failed");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+ return;
+ }
}
}
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
index 069107d..b8c1807 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
@@ -71,7 +71,7 @@
status_t initDecoder();
void drainAllOutputBuffers(bool eos);
- void drainOneOutputBuffer(int32_t picId, uint8_t *data);
+ bool drainOneOutputBuffer(int32_t picId, uint8_t *data);
void saveFirstOutputBuffer(int32_t pidId, uint8_t *data);
CropSettingsMode handleCropParams(const H264SwDecInfo& decInfo);
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index 99031ca..e329766 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -18,7 +18,8 @@
#define LOG_TAG "ColorUtils"
#include <inttypes.h>
-
+#include <arpa/inet.h>
+#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALookup.h>
#include <media/stagefright/foundation/ColorUtils.h>
@@ -29,6 +30,9 @@
typedef ColorAspects CA;
typedef ColorUtils CU;
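+// Split a 16-bit value into bytes for the little-endian CTA-861-3 layout below.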
+#define HI_UINT16(a) (((a) >> 8) & 0xFF)
+#define LO_UINT16(a) ((a) & 0xFF)
+
const static
ALookup<CU::ColorRange, CA::Range> sRanges{
{
@@ -578,5 +582,105 @@
transfer, asString((ColorTransfer)transfer));
}
+// static
+void ColorUtils::setHDRStaticInfoIntoFormat(
+ const HDRStaticInfo &info, sp<AMessage> &format) {
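+ // 25 bytes: a 1-byte Static_Metadata_Descriptor_ID followed by twelve
+ // 16-bit fields (display primaries, white point, luminance and light levels).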
+ sp<ABuffer> infoBuffer = new ABuffer(25);
+
+ // Convert the data in infoBuffer to the little-endian layout defined by CTA-861-3
+ uint8_t *data = infoBuffer->data();
+ // Static_Metadata_Descriptor_ID
+ data[0] = info.mID;
+
+ // display primary 0
+ data[1] = LO_UINT16(info.sType1.mR.x);
+ data[2] = HI_UINT16(info.sType1.mR.x);
+ data[3] = LO_UINT16(info.sType1.mR.y);
+ data[4] = HI_UINT16(info.sType1.mR.y);
+
+ // display primary 1
+ data[5] = LO_UINT16(info.sType1.mG.x);
+ data[6] = HI_UINT16(info.sType1.mG.x);
+ data[7] = LO_UINT16(info.sType1.mG.y);
+ data[8] = HI_UINT16(info.sType1.mG.y);
+
+ // display primary 2
+ data[9] = LO_UINT16(info.sType1.mB.x);
+ data[10] = HI_UINT16(info.sType1.mB.x);
+ data[11] = LO_UINT16(info.sType1.mB.y);
+ data[12] = HI_UINT16(info.sType1.mB.y);
+
+ // white point
+ data[13] = LO_UINT16(info.sType1.mW.x);
+ data[14] = HI_UINT16(info.sType1.mW.x);
+ data[15] = LO_UINT16(info.sType1.mW.y);
+ data[16] = HI_UINT16(info.sType1.mW.y);
+
+ // MaxDisplayLuminance
+ data[17] = LO_UINT16(info.sType1.mMaxDisplayLuminance);
+ data[18] = HI_UINT16(info.sType1.mMaxDisplayLuminance);
+
+ // MinDisplayLuminance
+ data[19] = LO_UINT16(info.sType1.mMinDisplayLuminance);
+ data[20] = HI_UINT16(info.sType1.mMinDisplayLuminance);
+
+ // MaxContentLightLevel
+ data[21] = LO_UINT16(info.sType1.mMaxContentLightLevel);
+ data[22] = HI_UINT16(info.sType1.mMaxContentLightLevel);
+
+ // MaxFrameAverageLightLevel
+ data[23] = LO_UINT16(info.sType1.mMaxFrameAverageLightLevel);
+ data[24] = HI_UINT16(info.sType1.mMaxFrameAverageLightLevel);
+
+ format->setBuffer("hdr-static-info", infoBuffer);
+}
+
+// a simple method copied from Utils.cpp
+static uint16_t U16LE_AT(const uint8_t *ptr) {
+ return ptr[0] | (ptr[1] << 8);
+}
+
+// static
+bool ColorUtils::getHDRStaticInfoFromFormat(const sp<AMessage> &format, HDRStaticInfo *info) {
+ sp<ABuffer> buf;
+ if (!format->findBuffer("hdr-static-info", &buf)) {
+ return false;
+ }
+
+ // TODO: Make this more flexible when adding more members to HDRStaticInfo
+ if (buf->size() != 25 /* static Metadata Type 1 size */) {
+ ALOGW("Ignore invalid HDRStaticInfo with size: %zu", buf->size());
+ return false;
+ }
+
+ const uint8_t *data = buf->data();
+ if (*data != HDRStaticInfo::kType1) {
+ ALOGW("Unsupported static Metadata Type %u", *data);
+ return false;
+ }
+
+ info->mID = HDRStaticInfo::kType1;
+ info->sType1.mR.x = U16LE_AT(&data[1]);
+ info->sType1.mR.y = U16LE_AT(&data[3]);
+ info->sType1.mG.x = U16LE_AT(&data[5]);
+ info->sType1.mG.y = U16LE_AT(&data[7]);
+ info->sType1.mB.x = U16LE_AT(&data[9]);
+ info->sType1.mB.y = U16LE_AT(&data[11]);
+ info->sType1.mW.x = U16LE_AT(&data[13]);
+ info->sType1.mW.y = U16LE_AT(&data[15]);
+ info->sType1.mMaxDisplayLuminance = U16LE_AT(&data[17]);
+ info->sType1.mMinDisplayLuminance = U16LE_AT(&data[19]);
+ info->sType1.mMaxContentLightLevel = U16LE_AT(&data[21]);
+ info->sType1.mMaxFrameAverageLightLevel = U16LE_AT(&data[23]);
+
+ ALOGV("Got HDRStaticInfo from config (R: %u %u, G: %u %u, B: %u %u, W: %u %u, "
+ "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
+ info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y,
+ info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
+ info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
+ info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);
+ return true;
+}
+
} // namespace android
diff --git a/media/libstagefright/foundation/MetaData.cpp b/media/libstagefright/foundation/MetaData.cpp
index b847eed..b4abc60 100644
--- a/media/libstagefright/foundation/MetaData.cpp
+++ b/media/libstagefright/foundation/MetaData.cpp
@@ -316,7 +316,7 @@
mSize = 0;
}
-String8 MetaData::typed_data::asString() const {
+String8 MetaData::typed_data::asString(bool verbose) const {
String8 out;
const void *data = storage();
switch(mType) {
@@ -348,7 +348,7 @@
default:
out = String8::format("(unknown type %d, size %zu)", mType, mSize);
- if (mSize <= 48) { // if it's less than three lines of hex data, dump it
+ if (verbose && mSize <= 48) { // if it's less than three lines of hex data, dump it
AString foo;
hexdump(data, mSize, 0, &foo);
out.append("\n");
@@ -367,13 +367,27 @@
s[4] = '\0';
}
+String8 MetaData::toString() const {
+ String8 s;
+ for (int i = mItems.size(); --i >= 0;) {
+ int32_t key = mItems.keyAt(i);
+ char cc[5];
+ MakeFourCCString(key, cc);
+ const typed_data &item = mItems.valueAt(i);
+ s.appendFormat("%s: %s", cc, item.asString(false /* verbose */).string());
+ if (i != 0) {
+ s.append(", ");
+ }
+ }
+ return s;
+}
+
void MetaData::dumpToLog() const {
for (int i = mItems.size(); --i >= 0;) {
int32_t key = mItems.keyAt(i);
char cc[5];
MakeFourCCString(key, cc);
const typed_data &item = mItems.valueAt(i);
- ALOGI("%s: %s", cc, item.asString().string());
+ ALOGI("%s: %s", cc, item.asString(true /* verbose */).string());
}
}
diff --git a/media/libstagefright/http/MediaHTTP.cpp b/media/libstagefright/http/MediaHTTP.cpp
index 801ff26..76ec625 100644
--- a/media/libstagefright/http/MediaHTTP.cpp
+++ b/media/libstagefright/http/MediaHTTP.cpp
@@ -65,10 +65,16 @@
mCachedSizeValid = false;
+ if (success) {
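+ // uriDebugString() suppresses the URI unless URI logging is explicitly
+ // enabled, keeping private URLs out of dumpsys output.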
+ AString sanitized = uriDebugString(uri);
+ mName = String8::format("MediaHTTP(%s)", sanitized.c_str());
+ }
+
return success ? OK : UNKNOWN_ERROR;
}
void MediaHTTP::disconnect() {
+ mName = String8("MediaHTTP(<disconnected>)");
if (mInitCheck != OK) {
return;
}
diff --git a/media/libstagefright/include/CallbackDataSource.h b/media/libstagefright/include/CallbackDataSource.h
index 9b33810..43e9b8d 100644
--- a/media/libstagefright/include/CallbackDataSource.h
+++ b/media/libstagefright/include/CallbackDataSource.h
@@ -38,11 +38,15 @@
virtual status_t getSize(off64_t *size);
virtual uint32_t flags();
virtual void close();
+ virtual String8 toString() {
+ return mName;
+ }
private:
sp<IDataSource> mIDataSource;
sp<IMemory> mMemory;
bool mIsClosed;
+ String8 mName;
DISALLOW_EVIL_CONSTRUCTORS(CallbackDataSource);
};
@@ -61,6 +65,9 @@
virtual status_t getSize(off64_t* size);
virtual uint32_t flags();
virtual void close() { mSource->close(); }
+ virtual String8 toString() {
+ return mName;
+ }
private:
// 2kb comes from experimenting with the time-to-first-frame from a MediaPlayer
@@ -74,6 +81,7 @@
uint8_t mCache[kCacheSize];
off64_t mCachedOffset;
size_t mCachedSize;
+ String8 mName;
DISALLOW_EVIL_CONSTRUCTORS(TinyCacheSource);
};
diff --git a/media/libstagefright/include/HTTPBase.h b/media/libstagefright/include/HTTPBase.h
index 0c66e27..d325e30 100644
--- a/media/libstagefright/include/HTTPBase.h
+++ b/media/libstagefright/include/HTTPBase.h
@@ -56,8 +56,13 @@
static void RegisterSocketUserMark(int sockfd, uid_t uid);
static void UnRegisterSocketUserMark(int sockfd);
+ virtual String8 toString() {
+ return mName;
+ }
+
protected:
virtual void addBandwidthMeasurement(size_t numBytes, int64_t delayUs);
+ String8 mName;
private:
struct BandwidthEntry {
diff --git a/media/libstagefright/include/NuCachedSource2.h b/media/libstagefright/include/NuCachedSource2.h
index a29bdf9..2639280 100644
--- a/media/libstagefright/include/NuCachedSource2.h
+++ b/media/libstagefright/include/NuCachedSource2.h
@@ -48,6 +48,10 @@
virtual String8 getMIMEType() const;
+ virtual String8 toString() {
+ return mName;
+ }
+
////////////////////////////////////////////////////////////////////////////
size_t cachedSize();
@@ -99,6 +103,7 @@
sp<DataSource> mSource;
sp<AHandlerReflector<NuCachedSource2> > mReflector;
sp<ALooper> mLooper;
+ String8 mName;
Mutex mSerializer;
mutable Mutex mLock;
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index e5c7177..208dc8b 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -2258,6 +2258,12 @@
audio_unique_id_t AudioFlinger::newAudioUniqueId(audio_unique_id_use_t use)
{
+ // This is a binder API, so a malicious client could pass in a bad parameter.
+ // Check for that before calling the internal API nextUniqueId().
+ if ((unsigned) use >= (unsigned) AUDIO_UNIQUE_ID_USE_MAX) {
+ ALOGE("newAudioUniqueId invalid use %d", use);
+ return AUDIO_UNIQUE_ID_ALLOCATE;
+ }
return nextUniqueId(use);
}
@@ -2421,6 +2427,7 @@
int32_t base = android_atomic_add(AUDIO_UNIQUE_ID_USE_MAX, &mNextUniqueId);
// We have no way of recovering from wraparound
LOG_ALWAYS_FATAL_IF(base == 0, "unique ID overflow");
+ // This is the internal API, so it is OK to assert on bad parameter.
LOG_ALWAYS_FATAL_IF((unsigned) use >= (unsigned) AUDIO_UNIQUE_ID_USE_MAX);
ALOG_ASSERT(audio_unique_id_get_use(base) == AUDIO_UNIQUE_ID_USE_UNSPECIFIED);
return (audio_unique_id_t) (base | use);
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index f11fd1c..498c33e 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -196,6 +196,7 @@
virtual uint32_t getInputFramesLost(audio_io_handle_t ioHandle) const;
+ // This is the binder API. For the internal API see nextUniqueId().
virtual audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t use);
virtual void acquireAudioSessionId(audio_session_t audioSession, pid_t pid);
@@ -559,6 +560,7 @@
// or from positive to negative (for signed IDs).
// Thus it may fail by returning an ID of the wrong sign,
// or by returning a non-unique ID.
+ // This is the internal API. For the binder API see newAudioUniqueId().
audio_unique_id_t nextUniqueId(audio_unique_id_use_t use);
status_t moveEffectChain_l(audio_session_t sessionId,
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioProfile.cpp
index 5987d1a..98f7a94 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioProfile.cpp
@@ -32,8 +32,8 @@
audio_format_t format) const
{
if (audio_formats_match(format, mFormat) &&
- (mChannelMasks.isEmpty() || supportsChannels(channelMask)) &&
- (mSamplingRates.isEmpty() || supportsRate(samplingRate))) {
+ supportsChannels(channelMask) &&
+ supportsRate(samplingRate)) {
return NO_ERROR;
}
return BAD_VALUE;
diff --git a/services/mediaextractor/MediaExtractorService.cpp b/services/mediaextractor/MediaExtractorService.cpp
index a2b35f6..0c93af1 100644
--- a/services/mediaextractor/MediaExtractorService.cpp
+++ b/services/mediaextractor/MediaExtractorService.cpp
@@ -26,69 +26,29 @@
namespace android {
-typedef struct {
- String8 mime;
- String8 name;
- pid_t owner;
- wp<MediaExtractor> extractor;
- String8 toString() {
- String8 str = name;
- str.append(" for mime ");
- str.append(mime);
- str.append(String8::format(", pid %d: ", owner));
- if (extractor.promote() == NULL) {
- str.append("deleted");
- } else {
- str.append("active");
- }
- return str;
- }
-} ExtractorInstance;
-
-static Vector<ExtractorInstance> extractors;
-
sp<IMediaExtractor> MediaExtractorService::makeExtractor(
const sp<IDataSource> &remoteSource, const char *mime) {
ALOGV("@@@ MediaExtractorService::makeExtractor for %s", mime);
sp<DataSource> localSource = DataSource::CreateFromIDataSource(remoteSource);
- sp<MediaExtractor> ret = MediaExtractor::CreateFromService(localSource, mime);
+ sp<IMediaExtractor> ret = MediaExtractor::CreateFromService(localSource, mime);
ALOGV("extractor service created %p (%s)",
ret.get(),
ret == NULL ? "" : ret->name());
if (ret != NULL) {
- ExtractorInstance ex;
- ex.mime = mime == NULL ? "NULL" : mime;
- ex.name = ret->name();
- ex.owner = IPCThreadState::self()->getCallingPid();
- ex.extractor = ret;
-
- if (extractors.size() > 10) {
- extractors.resize(10);
- }
- extractors.push_front(ex);
+ registerMediaExtractor(ret, remoteSource, mime);
}
return ret;
}
status_t MediaExtractorService::dump(int fd, const Vector<String16>& args) {
- String8 out;
- out.append("Recent extractors, most recent first:\n");
- for (size_t i = 0; i < extractors.size(); i++) {
- ExtractorInstance ex = extractors.itemAt(i);
- out.append(" ");
- out.append(ex.toString());
- out.append("\n");
- }
- write(fd, out.string(), out.size());
- return OK;
+ return dumpExtractors(fd, args);
}
-
status_t MediaExtractorService::onTransact(uint32_t code, const Parcel& data, Parcel* reply,
uint32_t flags)
{