Merge "MediaMetrics: limit maximum memory used" into rvc-dev
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 446ca4f..bfa60d9 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -508,7 +508,7 @@
case ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE:
case ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST:
case ACAMERA_CONTROL_ENABLE_ZSL:
- case ACAMERA_CONTROL_BOKEH_MODE:
+ case ACAMERA_CONTROL_EXTENDED_SCENE_MODE:
case ACAMERA_CONTROL_ZOOM_RATIO:
case ACAMERA_EDGE_MODE:
case ACAMERA_FLASH_MODE:
@@ -527,7 +527,6 @@
case ACAMERA_LENS_OPTICAL_STABILIZATION_MODE:
case ACAMERA_NOISE_REDUCTION_MODE:
case ACAMERA_SCALER_CROP_REGION:
- case ACAMERA_SCALER_ROTATE_AND_CROP:
case ACAMERA_SENSOR_EXPOSURE_TIME:
case ACAMERA_SENSOR_FRAME_DURATION:
case ACAMERA_SENSOR_SENSITIVITY:
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index bd259eb..4f9b0d1 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -984,10 +984,10 @@
* capture parameters itself.</p>
* <p>When set to AUTO, the individual algorithm controls in
* ACAMERA_CONTROL_* are in effect, such as ACAMERA_CONTROL_AF_MODE.</p>
- * <p>When set to USE_SCENE_MODE, the individual controls in
+ * <p>When set to USE_SCENE_MODE or USE_EXTENDED_SCENE_MODE, the individual controls in
* ACAMERA_CONTROL_* are mostly disabled, and the camera device
- * implements one of the scene mode settings (such as ACTION,
- * SUNSET, or PARTY) as it wishes. The camera device scene mode
+ * implements one of the scene mode or extended scene mode settings (such as ACTION,
+ * SUNSET, PARTY, or BOKEH) as it wishes. The camera device scene mode
* 3A settings are provided by {@link ACameraCaptureSession_captureCallback_result capture results}.</p>
* <p>When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference
* is that this frame will not be used by camera device background 3A statistics
@@ -1768,10 +1768,11 @@
ACAMERA_CONTROL_AF_SCENE_CHANGE = // byte (acamera_metadata_enum_android_control_af_scene_change_t)
ACAMERA_CONTROL_START + 42,
/**
- * <p>The list of bokeh modes for ACAMERA_CONTROL_BOKEH_MODE that are supported by this camera
- * device, and each bokeh mode's maximum streaming (non-stall) size with bokeh effect.</p>
+ * <p>The list of extended scene modes for ACAMERA_CONTROL_EXTENDED_SCENE_MODE that are supported
+ * by this camera device, and each extended scene mode's maximum streaming (non-stall) size
+ * with effect.</p>
*
- * @see ACAMERA_CONTROL_BOKEH_MODE
+ * @see ACAMERA_CONTROL_EXTENDED_SCENE_MODE
*
* <p>Type: int32[3*n]</p>
*
@@ -1780,28 +1781,28 @@
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* </ul></p>
*
- * <p>For OFF mode, the camera behaves normally with no bokeh effect.</p>
- * <p>For STILL_CAPTURE mode, the maximum streaming dimension specifies the limit under which
- * bokeh is effective when capture intent is PREVIEW. Note that when capture intent is
- * PREVIEW, the bokeh effect may not be as high quality compared to STILL_CAPTURE intent
- * in order to maintain reasonable frame rate. The maximum streaming dimension must be one
- * of the YUV_420_888 or PRIVATE resolutions in availableStreamConfigurations, or (0, 0)
- * if preview bokeh is not supported. If the application configures a stream larger than
- * the maximum streaming dimension, bokeh effect may not be applied for this stream for
- * PREVIEW intent.</p>
- * <p>For CONTINUOUS mode, the maximum streaming dimension specifies the limit under which
- * bokeh is effective. This dimension must be one of the YUV_420_888 or PRIVATE resolutions
- * in availableStreamConfigurations, and if the sensor maximum resolution is larger than or
- * equal to 1080p, the maximum streaming dimension must be at least 1080p. If the
- * application configures a stream with larger dimension, the stream may not have bokeh
- * effect applied.</p>
+ * <p>For DISABLED mode, the camera behaves normally with no extended scene mode enabled.</p>
+ * <p>For BOKEH_STILL_CAPTURE mode, the maximum streaming dimension specifies the limit
+ * under which bokeh is effective when capture intent is PREVIEW. Note that when capture
+ * intent is PREVIEW, the bokeh effect may not be as high in quality compared to
+ * STILL_CAPTURE intent in order to maintain reasonable frame rate. The maximum streaming
+ * dimension must be one of the YUV_420_888 or PRIVATE resolutions in
+ * availableStreamConfigurations, or (0, 0) if preview bokeh is not supported. If the
+ * application configures a stream larger than the maximum streaming dimension, bokeh
+ * effect may not be applied for this stream for PREVIEW intent.</p>
+ * <p>For BOKEH_CONTINUOUS mode, the maximum streaming dimension specifies the limit under
+ * which bokeh is effective. This dimension must be one of the YUV_420_888 or PRIVATE
+ * resolutions in availableStreamConfigurations, and if the sensor maximum resolution is
+ * larger than or equal to 1080p, the maximum streaming dimension must be at least 1080p.
+ * If the application configures a stream with larger dimension, the stream may not have
+ * bokeh effect applied.</p>
*/
- ACAMERA_CONTROL_AVAILABLE_BOKEH_MAX_SIZES = // int32[3*n]
+ ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES = // int32[3*n]
ACAMERA_CONTROL_START + 43,
/**
- * <p>The ranges of supported zoom ratio for non-OFF ACAMERA_CONTROL_BOKEH_MODE.</p>
+ * <p>The ranges of supported zoom ratio for non-DISABLED ACAMERA_CONTROL_EXTENDED_SCENE_MODE.</p>
*
- * @see ACAMERA_CONTROL_BOKEH_MODE
+ * @see ACAMERA_CONTROL_EXTENDED_SCENE_MODE
*
* <p>Type: float[2*n]</p>
*
@@ -1810,20 +1811,19 @@
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* </ul></p>
*
- * <p>When bokeh mode is enabled, the camera device may have limited range of zoom ratios
- * compared to when bokeh mode is disabled. This tag lists the zoom ratio ranges for all
- * supported non-OFF bokeh modes, in the same order as in
- * ACAMERA_CONTROL_AVAILABLE_BOKEH_CAPABILITIES.</p>
+ * <p>When extended scene mode is set, the camera device may have limited range of zoom ratios
+ * compared to when extended scene mode is DISABLED. This tag lists the zoom ratio ranges
+ * for all supported non-DISABLED extended scene modes, in the same order as in
+ * android.control.availableExtended.</p>
* <p>Range [1.0, 1.0] means that no zoom (optical or digital) is supported.</p>
- *
- * @see ACAMERA_CONTROL_AVAILABLE_BOKEH_CAPABILITIES
*/
- ACAMERA_CONTROL_AVAILABLE_BOKEH_ZOOM_RATIO_RANGES = // float[2*n]
+ ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES =
+ // float[2*n]
ACAMERA_CONTROL_START + 44,
/**
- * <p>Whether bokeh mode is enabled for a particular capture request.</p>
+ * <p>Whether extended scene mode is enabled for a particular capture request.</p>
*
- * <p>Type: byte (acamera_metadata_enum_android_control_bokeh_mode_t)</p>
+ * <p>Type: byte (acamera_metadata_enum_android_control_extended_scene_mode_t)</p>
*
* <p>This tag may appear in:
* <ul>
@@ -1833,36 +1833,33 @@
*
* <p>With bokeh mode, the camera device may blur out the parts of scene that are not in
* focus, creating a bokeh (or shallow depth of field) effect for people or objects.</p>
- * <p>When set to STILL_CAPTURE bokeh mode with STILL_CAPTURE capture intent, due to the extra
+ * <p>When set to BOKEH_STILL_CAPTURE mode with STILL_CAPTURE capture intent, due to the extra
* processing needed for high quality bokeh effect, the stall may be longer than when
* capture intent is not STILL_CAPTURE.</p>
- * <p>When set to STILL_CAPTURE bokeh mode with PREVIEW capture intent,</p>
+ * <p>When set to BOKEH_STILL_CAPTURE mode with PREVIEW capture intent,</p>
* <ul>
* <li>If the camera device has BURST_CAPTURE capability, the frame rate requirement of
* BURST_CAPTURE must still be met.</li>
- * <li>All streams not larger than the maximum streaming dimension for STILL_CAPTURE mode
- * (queried via {@link ACAMERA_CONTROL_AVAILABLE_BOKEH_CAPABILITIES })
+ * <li>All streams not larger than the maximum streaming dimension for BOKEH_STILL_CAPTURE mode
+ * (queried via {@link ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES })
* will have preview bokeh effect applied.</li>
* </ul>
- * <p>When set to CONTINUOUS mode, configured streams dimension should not exceed this mode's
+ * <p>When set to BOKEH_CONTINUOUS mode, configured streams dimension should not exceed this mode's
* maximum streaming dimension in order to have bokeh effect applied. Bokeh effect may not
* be available for streams larger than the maximum streaming dimension.</p>
- * <p>Switching between different bokeh modes may involve reconfiguration of the camera
+ * <p>Switching between different extended scene modes may involve reconfiguration of the camera
* pipeline, resulting in long latency. The application should check this key against the
* available session keys queried via
* {@link ACameraManager_getCameraCharacteristics }.</p>
- * <p>When bokeh mode is on, the camera device may override certain control parameters, such as
- * reduce frame rate or use face priority scene mode, to achieve best power and quality
- * tradeoffs. When turned on, AE, AWB, and AF run in auto modes, and only the mandatory
- * stream combinations of LIMITED hardware level are guaranteed.</p>
* <p>For a logical multi-camera, bokeh may be implemented by stereo vision from sub-cameras
* with different field of view. As a result, when bokeh mode is enabled, the camera device
- * may override ACAMERA_SCALER_CROP_REGION, and the field of view will be smaller than when
- * bokeh mode is off.</p>
+ * may override ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO, and the field of
+ * view may be smaller than when bokeh mode is off.</p>
*
+ * @see ACAMERA_CONTROL_ZOOM_RATIO
* @see ACAMERA_SCALER_CROP_REGION
*/
- ACAMERA_CONTROL_BOKEH_MODE = // byte (acamera_metadata_enum_android_control_bokeh_mode_t)
+ ACAMERA_CONTROL_EXTENDED_SCENE_MODE = // byte (acamera_metadata_enum_android_control_extended_scene_mode_t)
ACAMERA_CONTROL_START + 45,
/**
* <p>Minimum and maximum zoom ratios supported by this camera device.</p>
@@ -3690,108 +3687,6 @@
ACAMERA_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP =
// int32
ACAMERA_SCALER_START + 15,
- /**
- * <p>List of rotate-and-crop modes for ACAMERA_SCALER_ROTATE_AND_CROP that are supported by this camera device.</p>
- *
- * @see ACAMERA_SCALER_ROTATE_AND_CROP
- *
- * <p>Type: byte[n]</p>
- *
- * <p>This tag may appear in:
- * <ul>
- * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
- * </ul></p>
- *
- * <p>This entry lists the valid modes for ACAMERA_SCALER_ROTATE_AND_CROP for this camera device.</p>
- * <p>Starting with API level 30, all devices will list at least <code>ROTATE_AND_CROP_NONE</code>.
- * Devices with support for rotate-and-crop will additionally list at least
- * <code>ROTATE_AND_CROP_AUTO</code> and <code>ROTATE_AND_CROP_90</code>.</p>
- *
- * @see ACAMERA_SCALER_ROTATE_AND_CROP
- */
- ACAMERA_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES = // byte[n]
- ACAMERA_SCALER_START + 16,
- /**
- * <p>Whether a rotation-and-crop operation is applied to processed
- * outputs from the camera.</p>
- *
- * <p>Type: byte (acamera_metadata_enum_android_scaler_rotate_and_crop_t)</p>
- *
- * <p>This tag may appear in:
- * <ul>
- * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
- * <li>ACaptureRequest</li>
- * </ul></p>
- *
- * <p>This control is primarily intended to help camera applications with no support for
- * multi-window modes to work correctly on devices where multi-window scenarios are
- * unavoidable, such as foldables or other devices with variable display geometry or more
- * free-form window placement (such as laptops, which often place portrait-orientation apps
- * in landscape with pillarboxing).</p>
- * <p>If supported, the default value is <code>ROTATE_AND_CROP_AUTO</code>, which allows the camera API
- * to enable backwards-compatibility support for applications that do not support resizing
- * / multi-window modes, when the device is in fact in a multi-window mode (such as inset
- * portrait on laptops, or on a foldable device in some fold states). In addition,
- * <code>ROTATE_AND_CROP_NONE</code> and <code>ROTATE_AND_CROP_90</code> will always be available if this control
- * is supported by the device. If not supported, devices API level 30 or higher will always
- * list only <code>ROTATE_AND_CROP_NONE</code>.</p>
- * <p>When <code>CROP_AUTO</code> is in use, and the camera API activates backward-compatibility mode,
- * several metadata fields will also be parsed differently to ensure that coordinates are
- * correctly handled for features like drawing face detection boxes or passing in
- * tap-to-focus coordinates. The camera API will convert positions in the active array
- * coordinate system to/from the cropped-and-rotated coordinate system to make the
- * operation transparent for applications. The following controls are affected:</p>
- * <ul>
- * <li>ACAMERA_CONTROL_AE_REGIONS</li>
- * <li>ACAMERA_CONTROL_AF_REGIONS</li>
- * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
- * <li>android.statistics.faces</li>
- * </ul>
- * <p>Capture results will contain the actual value selected by the API;
- * <code>ROTATE_AND_CROP_AUTO</code> will never be seen in a capture result.</p>
- * <p>Applications can also select their preferred cropping mode, either to opt out of the
- * backwards-compatibility treatment, or to use the cropping feature themselves as needed.
- * In this case, no coordinate translation will be done automatically, and all controls
- * will continue to use the normal active array coordinates.</p>
- * <p>Cropping and rotating is done after the application of digital zoom (via either
- * ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO), but before each individual
- * output is further cropped and scaled. It only affects processed outputs such as
- * YUV, PRIVATE, and JPEG. It has no effect on RAW outputs.</p>
- * <p>When <code>CROP_90</code> or <code>CROP_270</code> are selected, there is a significant loss to the field of
- * view. For example, with a 4:3 aspect ratio output of 1600x1200, <code>CROP_90</code> will still
- * produce 1600x1200 output, but these buffers are cropped from a vertical 3:4 slice at the
- * center of the 4:3 area, then rotated to be 4:3, and then upscaled to 1600x1200. Only
- * 56.25% of the original FOV is still visible. In general, for an aspect ratio of <code>w:h</code>,
- * the crop and rotate operation leaves <code>(h/w)^2</code> of the field of view visible. For 16:9,
- * this is ~31.6%.</p>
- * <p>As a visual example, the figure below shows the effect of <code>ROTATE_AND_CROP_90</code> on the
- * outputs for the following parameters:</p>
- * <ul>
- * <li>Sensor active array: <code>2000x1500</code></li>
- * <li>Crop region: top-left: <code>(500, 375)</code>, size: <code>(1000, 750)</code> (4:3 aspect ratio)</li>
- * <li>Output streams: YUV <code>640x480</code> and YUV <code>1280x720</code></li>
- * <li><code>ROTATE_AND_CROP_90</code></li>
- * </ul>
- * <p><img alt="Effect of ROTATE_AND_CROP_90" src="../images/camera2/metadata/android.scaler.rotateAndCrop/crop-region-rotate-90-43-ratio.png" /></p>
- * <p>With these settings, the regions of the active array covered by the output streams are:</p>
- * <ul>
- * <li>640x480 stream crop: top-left: <code>(219, 375)</code>, size: <code>(562, 750)</code></li>
- * <li>1280x720 stream crop: top-left: <code>(289, 375)</code>, size: <code>(422, 750)</code></li>
- * </ul>
- * <p>Since the buffers are rotated, the buffers as seen by the application are:</p>
- * <ul>
- * <li>640x480 stream: top-left: <code>(781, 375)</code> on active array, size: <code>(640, 480)</code>, downscaled 1.17x from sensor pixels</li>
- * <li>1280x720 stream: top-left: <code>(711, 375)</code> on active array, size: <code>(1280, 720)</code>, upscaled 1.71x from sensor pixels</li>
- * </ul>
- *
- * @see ACAMERA_CONTROL_AE_REGIONS
- * @see ACAMERA_CONTROL_AF_REGIONS
- * @see ACAMERA_CONTROL_AWB_REGIONS
- * @see ACAMERA_CONTROL_ZOOM_RATIO
- * @see ACAMERA_SCALER_CROP_REGION
- */
- ACAMERA_SCALER_ROTATE_AND_CROP = // byte (acamera_metadata_enum_android_scaler_rotate_and_crop_t)
- ACAMERA_SCALER_START + 17,
ACAMERA_SCALER_END,
/**
@@ -6993,6 +6888,7 @@
* This setting can only be used if scene mode is supported (i.e.
* ACAMERA_CONTROL_AVAILABLE_SCENE_MODES
* contain some modes other than DISABLED).</p>
+ * <p>For extended scene modes such as BOKEH, please use USE_EXTENDED_SCENE_MODE instead.</p>
*
* @see ACAMERA_CONTROL_AVAILABLE_SCENE_MODES
*/
@@ -7010,6 +6906,18 @@
*/
ACAMERA_CONTROL_MODE_OFF_KEEP_STATE = 3,
+ /**
+ * <p>Use a specific extended scene mode.</p>
+ * <p>When extended scene mode is on, the camera device may override certain control
+ * parameters, such as targetFpsRange, AE, AWB, and AF modes, to achieve best power and
+ * quality tradeoffs. Only the mandatory stream combinations of LIMITED hardware level
+ * are guaranteed.</p>
+ * <p>This setting can only be used if extended scene mode is supported (i.e.
+ * android.control.availableExtendedSceneModes
+ * contains some modes other than DISABLED).</p>
+ */
+ ACAMERA_CONTROL_MODE_USE_EXTENDED_SCENE_MODE = 4,
+
} acamera_metadata_enum_android_control_mode_t;
// ACAMERA_CONTROL_SCENE_MODE
@@ -7399,12 +7307,12 @@
} acamera_metadata_enum_android_control_af_scene_change_t;
-// ACAMERA_CONTROL_BOKEH_MODE
-typedef enum acamera_metadata_enum_acamera_control_bokeh_mode {
+// ACAMERA_CONTROL_EXTENDED_SCENE_MODE
+typedef enum acamera_metadata_enum_acamera_control_extended_scene_mode {
/**
- * <p>Bokeh mode is disabled.</p>
+ * <p>Extended scene mode is disabled.</p>
*/
- ACAMERA_CONTROL_BOKEH_MODE_OFF = 0,
+ ACAMERA_CONTROL_EXTENDED_SCENE_MODE_DISABLED = 0,
/**
* <p>High quality bokeh mode is enabled for all non-raw streams (including YUV,
@@ -7412,7 +7320,7 @@
* extra image processing, this mode may introduce additional stall to non-raw streams.
* This mode should be used in high quality still capture use case.</p>
*/
- ACAMERA_CONTROL_BOKEH_MODE_STILL_CAPTURE = 1,
+ ACAMERA_CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE = 1,
/**
* <p>Bokeh effect must not slow down capture rate relative to sensor raw output,
@@ -7420,9 +7328,9 @@
* streaming dimension. This mode should be used if performance and power are a
* priority, such as video recording.</p>
*/
- ACAMERA_CONTROL_BOKEH_MODE_CONTINUOUS = 2,
+ ACAMERA_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS = 2,
-} acamera_metadata_enum_android_control_bokeh_mode_t;
+} acamera_metadata_enum_android_control_extended_scene_mode_t;
@@ -8314,51 +8222,6 @@
} acamera_metadata_enum_android_scaler_available_recommended_stream_configurations_t;
-// ACAMERA_SCALER_ROTATE_AND_CROP
-typedef enum acamera_metadata_enum_acamera_scaler_rotate_and_crop {
- /**
- * <p>No rotate and crop is applied. Processed outputs are in the sensor orientation.</p>
- */
- ACAMERA_SCALER_ROTATE_AND_CROP_NONE = 0,
-
- /**
- * <p>Processed images are rotated by 90 degrees clockwise, and then cropped
- * to the original aspect ratio.</p>
- */
- ACAMERA_SCALER_ROTATE_AND_CROP_90 = 1,
-
- /**
- * <p>Processed images are rotated by 180 degrees. Since the aspect ratio does not
- * change, no cropping is performed.</p>
- */
- ACAMERA_SCALER_ROTATE_AND_CROP_180 = 2,
-
- /**
- * <p>Processed images are rotated by 270 degrees clockwise, and then cropped
- * to the original aspect ratio.</p>
- */
- ACAMERA_SCALER_ROTATE_AND_CROP_270 = 3,
-
- /**
- * <p>The camera API automatically selects the best concrete value for
- * rotate-and-crop based on the application's support for resizability and the current
- * multi-window mode.</p>
- * <p>If the application does not support resizing but the display mode for its main
- * Activity is not in a typical orientation, the camera API will set <code>ROTATE_AND_CROP_90</code>
- * or some other supported rotation value, depending on device configuration,
- * to ensure preview and captured images are correctly shown to the user. Otherwise,
- * <code>ROTATE_AND_CROP_NONE</code> will be selected.</p>
- * <p>When a value other than NONE is selected, several metadata fields will also be parsed
- * differently to ensure that coordinates are correctly handled for features like drawing
- * face detection boxes or passing in tap-to-focus coordinates. The camera API will
- * convert positions in the active array coordinate system to/from the cropped-and-rotated
- * coordinate system to make the operation transparent for applications.</p>
- * <p>No coordinate mapping will be done when the application selects a non-AUTO mode.</p>
- */
- ACAMERA_SCALER_ROTATE_AND_CROP_AUTO = 4,
-
-} acamera_metadata_enum_android_scaler_rotate_and_crop_t;
-
// ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
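
A minimal sketch of how the renamed extended-scene-mode keys above are exercised from the camera NDK. This is illustrative only and not part of the patch; chars is assumed to come from ACameraManager_getCameraCharacteristics(), request from ACameraDevice_createCaptureRequest(), and the helper name is hypothetical.

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative sketch, not part of this patch.
    static bool enableBokehStillCapture(const ACameraMetadata* chars, ACaptureRequest* request) {
        // AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES packs (mode, maxWidth, maxHeight) triples.
        ACameraMetadata_const_entry entry = {};
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES, &entry) != ACAMERA_OK) {
            return false;  // Key absent: no extended scene modes advertised.
        }
        bool supported = false;
        for (uint32_t i = 0; i + 2 < entry.count; i += 3) {
            if (entry.data.i32[i] == ACAMERA_CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE) {
                supported = true;
                break;
            }
        }
        if (!supported) {
            return false;
        }
        // Request bokeh; per the ACAMERA_CONTROL_MODE docs above, USE_EXTENDED_SCENE_MODE is
        // the control mode that pairs with extended scene modes such as BOKEH.
        uint8_t mode = ACAMERA_CONTROL_MODE_USE_EXTENDED_SCENE_MODE;
        uint8_t sceneMode = ACAMERA_CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_MODE, 1, &mode);
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_EXTENDED_SCENE_MODE, 1, &sceneMode);
        return true;
    }
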
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 5990116..b112249 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -319,10 +319,11 @@
// Obtain Codec2Client
std::vector<Traits> traits = Codec2Client::ListComponents();
- // parse APEX XML first, followed by vendor XML
+ // parse APEX XML first, followed by vendor XML.
+ // Note: APEX XML names do not depend on ro.media.xml_variant.* properties.
MediaCodecsXmlParser parser;
parser.parseXmlFilesInSearchDirs(
- parser.getDefaultXmlNames(),
+ { "media_codecs.xml", "media_codecs_performance.xml" },
{ "/apex/com.android.media.swcodec/etc" });
// TODO: remove these c2-specific files once product moved to default file names
diff --git a/media/libaudioclient/AudioEffect.cpp b/media/libaudioclient/AudioEffect.cpp
index a1b141b..c183ab0 100644
--- a/media/libaudioclient/AudioEffect.cpp
+++ b/media/libaudioclient/AudioEffect.cpp
@@ -36,7 +36,7 @@
// ---------------------------------------------------------------------------
AudioEffect::AudioEffect(const String16& opPackageName)
- : mStatus(NO_INIT), mOpPackageName(opPackageName)
+ : mStatus(NO_INIT), mProbe(false), mOpPackageName(opPackageName)
{
}
@@ -49,12 +49,13 @@
void* user,
audio_session_t sessionId,
audio_io_handle_t io,
- const AudioDeviceTypeAddr& device
+ const AudioDeviceTypeAddr& device,
+ bool probe
)
- : mStatus(NO_INIT), mOpPackageName(opPackageName)
+ : mStatus(NO_INIT), mProbe(false), mOpPackageName(opPackageName)
{
AutoMutex lock(mConstructLock);
- mStatus = set(type, uuid, priority, cbf, user, sessionId, io, device);
+ mStatus = set(type, uuid, priority, cbf, user, sessionId, io, device, probe);
}
AudioEffect::AudioEffect(const char *typeStr,
@@ -65,9 +66,10 @@
void* user,
audio_session_t sessionId,
audio_io_handle_t io,
- const AudioDeviceTypeAddr& device
+ const AudioDeviceTypeAddr& device,
+ bool probe
)
- : mStatus(NO_INIT), mOpPackageName(opPackageName)
+ : mStatus(NO_INIT), mProbe(false), mOpPackageName(opPackageName)
{
effect_uuid_t type;
effect_uuid_t *pType = NULL;
@@ -89,7 +91,7 @@
}
AutoMutex lock(mConstructLock);
- mStatus = set(pType, pUuid, priority, cbf, user, sessionId, io, device);
+ mStatus = set(pType, pUuid, priority, cbf, user, sessionId, io, device, probe);
}
status_t AudioEffect::set(const effect_uuid_t *type,
@@ -99,7 +101,8 @@
void* user,
audio_session_t sessionId,
audio_io_handle_t io,
- const AudioDeviceTypeAddr& device)
+ const AudioDeviceTypeAddr& device,
+ bool probe)
{
sp<IEffect> iEffect;
sp<IMemory> cblk;
@@ -126,7 +129,7 @@
ALOGW("Must specify at least type or uuid");
return BAD_VALUE;
}
-
+ mProbe = probe;
mPriority = priority;
mCbf = cbf;
mUserData = user;
@@ -142,15 +145,18 @@
iEffect = audioFlinger->createEffect((effect_descriptor_t *)&mDescriptor,
mIEffectClient, priority, io, mSessionId, device, mOpPackageName, mClientPid,
- &mStatus, &mId, &enabled);
+ probe, &mStatus, &mId, &enabled);
- if (iEffect == 0 || (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS)) {
+ // In probe mode, we stop here and return the status: the IEffect interface to
+ // audio flinger will not be retained. initCheck() will return the creation status
+ // but all other APIs will return invalid operation.
+ if (probe || iEffect == 0 || (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS)) {
char typeBuffer[64], uuidBuffer[64];
guidToString(type, typeBuffer, sizeof(typeBuffer));
guidToString(uuid, uuidBuffer, sizeof(uuidBuffer));
- ALOGE("set(): AudioFlinger could not create effect %s / %s, status: %d",
+ ALOGE_IF(!probe, "set(): AudioFlinger could not create effect %s / %s, status: %d",
typeBuffer, uuidBuffer, mStatus);
- if (iEffect == 0) {
+ if (!probe && iEffect == 0) {
mStatus = NO_INIT;
}
return mStatus;
@@ -191,7 +197,7 @@
{
ALOGV("Destructor %p", this);
- if (mStatus == NO_ERROR || mStatus == ALREADY_EXISTS) {
+ if (!mProbe && (mStatus == NO_ERROR || mStatus == ALREADY_EXISTS)) {
if (!audio_is_global_session(mSessionId)) {
AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
}
@@ -201,9 +207,9 @@
}
mIEffect.clear();
mCblkMemory.clear();
- mIEffectClient.clear();
- IPCThreadState::self()->flushCommands();
}
+ mIEffectClient.clear();
+ IPCThreadState::self()->flushCommands();
}
@@ -226,6 +232,9 @@
status_t AudioEffect::setEnabled(bool enabled)
{
+ if (mProbe) {
+ return INVALID_OPERATION;
+ }
if (mStatus != NO_ERROR) {
return (mStatus == ALREADY_EXISTS) ? (status_t) INVALID_OPERATION : mStatus;
}
@@ -254,6 +263,9 @@
uint32_t *replySize,
void *replyData)
{
+ if (mProbe) {
+ return INVALID_OPERATION;
+ }
if (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS) {
ALOGV("command() bad status %d", mStatus);
return mStatus;
@@ -287,6 +299,9 @@
status_t AudioEffect::setParameter(effect_param_t *param)
{
+ if (mProbe) {
+ return INVALID_OPERATION;
+ }
if (mStatus != NO_ERROR) {
return (mStatus == ALREADY_EXISTS) ? (status_t) INVALID_OPERATION : mStatus;
}
@@ -307,6 +322,9 @@
status_t AudioEffect::setParameterDeferred(effect_param_t *param)
{
+ if (mProbe) {
+ return INVALID_OPERATION;
+ }
if (mStatus != NO_ERROR) {
return (mStatus == ALREADY_EXISTS) ? (status_t) INVALID_OPERATION : mStatus;
}
@@ -333,6 +351,9 @@
status_t AudioEffect::setParameterCommit()
{
+ if (mProbe) {
+ return INVALID_OPERATION;
+ }
if (mStatus != NO_ERROR) {
return (mStatus == ALREADY_EXISTS) ? (status_t) INVALID_OPERATION : mStatus;
}
@@ -347,6 +368,9 @@
status_t AudioEffect::getParameter(effect_param_t *param)
{
+ if (mProbe) {
+ return INVALID_OPERATION;
+ }
if (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS) {
return mStatus;
}
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 513da2b..16d2232 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -662,6 +662,7 @@
const AudioDeviceTypeAddr& device,
const String16& opPackageName,
pid_t pid,
+ bool probe,
status_t *status,
int *id,
int *enabled)
@@ -689,6 +690,7 @@
}
data.writeString16(opPackageName);
data.writeInt32((int32_t) pid);
+ data.writeInt32(probe ? 1 : 0);
status_t lStatus = remote()->transact(CREATE_EFFECT, data, &reply);
if (lStatus != NO_ERROR) {
@@ -1395,12 +1397,13 @@
}
const String16 opPackageName = data.readString16();
pid_t pid = (pid_t)data.readInt32();
+ bool probe = data.readInt32() == 1;
int id = 0;
int enabled = 0;
sp<IEffect> effect = createEffect(&desc, client, priority, output, sessionId, device,
- opPackageName, pid, &status, &id, &enabled);
+ opPackageName, pid, probe, &status, &id, &enabled);
reply->writeInt32(status);
reply->writeInt32(id);
reply->writeInt32(enabled);
diff --git a/media/libaudioclient/include/media/AudioEffect.h b/media/libaudioclient/include/media/AudioEffect.h
index eec9dfc..cb76252 100644
--- a/media/libaudioclient/include/media/AudioEffect.h
+++ b/media/libaudioclient/include/media/AudioEffect.h
@@ -365,6 +365,10 @@
* device: An audio device descriptor. Only used when "sessionID" is AUDIO_SESSION_DEVICE.
* Specifies the audio device type and address the effect must be attached to.
* If "sessionID" is AUDIO_SESSION_DEVICE then "io" must be AUDIO_IO_HANDLE_NONE.
+     * probe: true if the effect is created in a degraded mode used only to verify whether
+     *        effect creation is possible. In this mode, no IEffect interface to AudioFlinger
+     *        is created and all actions besides the getters implemented in the client
+     *        AudioEffect object are no-ops after effect creation.
*/
AudioEffect(const effect_uuid_t *type,
@@ -375,8 +379,8 @@
void* user = NULL,
audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
- const AudioDeviceTypeAddr& device = {}
- );
+ const AudioDeviceTypeAddr& device = {},
+ bool probe = false);
/* Constructor.
* Same as above but with type and uuid specified by character strings
@@ -389,8 +393,8 @@
void* user = NULL,
audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
- const AudioDeviceTypeAddr& device = {}
- );
+ const AudioDeviceTypeAddr& device = {},
+ bool probe = false);
/* Terminates the AudioEffect and unregisters it from AudioFlinger.
* The effect engine is also destroyed if this AudioEffect was the last controlling
@@ -412,8 +416,8 @@
void* user = NULL,
audio_session_t sessionId = AUDIO_SESSION_OUTPUT_MIX,
audio_io_handle_t io = AUDIO_IO_HANDLE_NONE,
- const AudioDeviceTypeAddr& device = {}
- );
+ const AudioDeviceTypeAddr& device = {},
+ bool probe = false);
/* Result of constructing the AudioEffect. This must be checked
* before using any AudioEffect API.
@@ -547,6 +551,8 @@
audio_session_t mSessionId; // audio session ID
int32_t mPriority; // priority for effect control
status_t mStatus; // effect status
+ bool mProbe; // effect created in probe mode: all commands
+ // are no ops because mIEffect is NULL
effect_callback_t mCbf; // callback function for status, control and
// parameter changes notifications
void* mUserData; // client context for callback function
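
A minimal sketch of the probe path added above, assuming the caller already has an effect type UUID (kEffectType and the package name are placeholders, not real constants):

    // Illustrative sketch, not part of this patch.
    // kEffectType is a placeholder for a real effect type UUID.
    sp<AudioEffect> probe = new AudioEffect(
            kEffectType,
            String16("com.example.app"),   // opPackageName (hypothetical)
            nullptr,                       // accept any matching implementation UUID
            0 /*priority*/, nullptr /*cbf*/, nullptr /*user*/,
            AUDIO_SESSION_OUTPUT_MIX,
            AUDIO_IO_HANDLE_NONE,
            AudioDeviceTypeAddr{} /*device*/,
            true /*probe*/);
    if (probe->initCheck() == NO_ERROR) {
        // The effect could be created with these parameters. The engine itself was never
        // instantiated; setEnabled(), command(), setParameter() and getParameter() on this
        // object return INVALID_OPERATION.
    }
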
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index f4ce5d7..c9d9716 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -476,6 +476,7 @@
const AudioDeviceTypeAddr& device,
const String16& callingPackage,
pid_t pid,
+ bool probe,
status_t *status,
int *id,
int *enabled) = 0;
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 98c5497..637322f 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -29,9 +29,55 @@
#include <OMX_Video.h>
#include <sys/stat.h>
+#include <array>
+#include <string>
+#include <vector>
+
namespace android {
-constexpr char const * const MediaProfiles::xmlFiles[];
+namespace /* unnamed */ {
+
+// Returns a list of possible paths for the media_profiles XML file.
+std::array<char const*, 5> const& getXmlPaths() {
+ static std::array<std::string const, 5> const paths =
+ []() -> decltype(paths) {
+ // Directories for XML file that will be searched (in this order).
+ constexpr std::array<char const*, 4> searchDirs = {
+ "product/etc/",
+ "odm/etc/",
+ "vendor/etc/",
+ "system/etc/",
+ };
+
+            // The file name may contain a variant if the vendor property
+            // ro.media.xml_variant.profiles is set.
+ char variant[PROPERTY_VALUE_MAX];
+ property_get("ro.media.xml_variant.profiles",
+ variant,
+ "_V1_0");
+
+ std::string fileName =
+ std::string("media_profiles") + variant + ".xml";
+
+ return { searchDirs[0] + fileName,
+ searchDirs[1] + fileName,
+ searchDirs[2] + fileName,
+ searchDirs[3] + fileName,
+ "system/etc/media_profiles_V1_0.xml" // System fallback
+ };
+ }();
+ static std::array<char const*, 5> const cPaths = {
+ paths[0].data(),
+ paths[1].data(),
+ paths[2].data(),
+ paths[3].data(),
+ paths[4].data()
+ };
+ return cPaths;
+}
+
+} // unnamed namespace
+
Mutex MediaProfiles::sLock;
bool MediaProfiles::sIsInitialized = false;
MediaProfiles *MediaProfiles::sInstance = NULL;
@@ -48,7 +94,7 @@
{"amrwb", AUDIO_ENCODER_AMR_WB},
{"aac", AUDIO_ENCODER_AAC},
{"heaac", AUDIO_ENCODER_HE_AAC},
- {"aaceld", AUDIO_ENCODER_AAC_ELD},
+ {"aaceld", AUDIO_ENCODER_AAC_ELD},
{"opus", AUDIO_ENCODER_OPUS}
};
@@ -610,7 +656,7 @@
char value[PROPERTY_VALUE_MAX];
if (property_get("media.settings.xml", value, NULL) <= 0) {
const char* xmlFile = nullptr;
- for (auto const& f : xmlFiles) {
+ for (auto const& f : getXmlPaths()) {
if (checkXmlFile(f)) {
xmlFile = f;
break;
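
The resolved path is consumed through the existing singleton; a minimal sketch of a client query follows (the "vid.width" parameter name mirrors existing callers and is shown for illustration only):

    // Illustrative sketch, not part of this patch. getInstance() now resolves the
    // profiles XML via getXmlPaths() unless the media.settings.xml property overrides it.
    MediaProfiles* profiles = MediaProfiles::getInstance();
    if (profiles != nullptr) {
        int width = profiles->getCamcorderProfileParamByName(
                "vid.width", 0 /*cameraId*/, CAMCORDER_QUALITY_HIGH);
    }
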
diff --git a/media/libmedia/include/media/MediaProfiles.h b/media/libmedia/include/media/MediaProfiles.h
index 3e8e7c8..4cc5b95 100644
--- a/media/libmedia/include/media/MediaProfiles.h
+++ b/media/libmedia/include/media/MediaProfiles.h
@@ -1,18 +1,18 @@
/*
- **
- ** Copyright 2010, The Android Open Source Project.
- **
- ** Licensed under the Apache License, Version 2.0 (the "License");
- ** you may not use this file except in compliance with the License.
- ** You may obtain a copy of the License at
- **
- ** http://www.apache.org/licenses/LICENSE-2.0
- **
- ** Unless required by applicable law or agreed to in writing, software
- ** distributed under the License is distributed on an "AS IS" BASIS,
- ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ** See the License for the specific language governing permissions and
- ** limitations under the License.
+ *
+ * Copyright 2010, The Android Open Source Project.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
*/
#ifndef ANDROID_MEDIAPROFILES_H
@@ -82,29 +82,12 @@
{
public:
- /*
- * If property media.settings.xml is not set:
- *
- * getInstance() will search through paths listed in xmlFiles.
- * The search goes through members of xmlFiles in the order that they are
- * defined, so files at lower indices have higher priority than those at
- * higher indices.
- *
- * TODO: Add runtime validation of xml files. A search should be considered
- * successful only when validation is successful.
- */
- static constexpr char const * const xmlFiles[] = {
- "odm/etc/media_profiles_V1_0.xml",
- "vendor/etc/media_profiles_V1_0.xml",
- "system/etc/media_profiles.xml"
- };
-
/**
* Returns the singleton instance for subsequence queries or NULL if error.
*
* If property media.settings.xml is set, getInstance() will attempt to read
* from file path in media.settings.xml. Otherwise, getInstance() will
- * search through the list xmlFiles as described above.
+ * search through the list of preset XML file paths.
*
* If the search is unsuccessful, the default instance will be created
* instead.
diff --git a/media/libmedia/xsd/vts/ValidateMediaProfiles.cpp b/media/libmedia/xsd/vts/ValidateMediaProfiles.cpp
index 7729d52..4f3951a 100644
--- a/media/libmedia/xsd/vts/ValidateMediaProfiles.cpp
+++ b/media/libmedia/xsd/vts/ValidateMediaProfiles.cpp
@@ -14,23 +14,68 @@
* limitations under the License.
*/
+#include <fstream>
#include <string>
#include <android-base/file.h>
#include <android-base/properties.h>
#include "utility/ValidateXml.h"
+bool isFileReadable(std::string const& path) {
+ std::ifstream f(path);
+ return f.good();
+}
+
TEST(CheckConfig, mediaProfilesValidation) {
RecordProperty("description",
"Verify that the media profiles file "
"is valid according to the schema");
+ // Schema path.
+ constexpr char const* xsdPath = "/data/local/tmp/media_profiles.xsd";
+
+ // If "media.settings.xml" is set, it will be used as an absolute path.
std::string mediaSettingsPath = android::base::GetProperty("media.settings.xml", "");
if (mediaSettingsPath.empty()) {
- mediaSettingsPath.assign("/vendor/etc/media_profiles_V1_0.xml");
- }
+ // If "media.settings.xml" is not set, we will search through a list of
+ // file paths.
- EXPECT_ONE_VALID_XML_MULTIPLE_LOCATIONS(android::base::Basename(mediaSettingsPath).c_str(),
- {android::base::Dirname(mediaSettingsPath).c_str()},
- "/data/local/tmp/media_profiles.xsd");
+ constexpr char const* xmlSearchDirs[] = {
+ "/product/etc/",
+ "/odm/etc/",
+ "/vendor/etc/",
+ };
+
+ // The vendor may provide a vendor variant for the file name.
+ std::string variant = android::base::GetProperty(
+ "ro.media.xml_variant.profiles", "_V1_0");
+ std::string fileName = "media_profiles" + variant + ".xml";
+
+ // Fallback path does not depend on the property defined from the vendor
+ // partition.
+ constexpr char const* fallbackXmlPath =
+ "/system/etc/media_profiles_V1_0.xml";
+
+ std::vector<std::string> xmlPaths = {
+ xmlSearchDirs[0] + fileName,
+ xmlSearchDirs[1] + fileName,
+ xmlSearchDirs[2] + fileName,
+ fallbackXmlPath
+ };
+
+ auto findXmlPath =
+ std::find_if(xmlPaths.begin(), xmlPaths.end(), isFileReadable);
+ ASSERT_TRUE(findXmlPath != xmlPaths.end())
+ << "Cannot read from " << fileName
+ << " in any search directories ("
+ << xmlSearchDirs[0] << ", "
+ << xmlSearchDirs[1] << ", "
+ << xmlSearchDirs[2] << ") and from "
+ << fallbackXmlPath << ".";
+
+ char const* xmlPath = findXmlPath->c_str();
+ EXPECT_VALID_XML(xmlPath, xsdPath);
+ } else {
+ EXPECT_VALID_XML(mediaSettingsPath.c_str(), xsdPath);
+ }
}
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 2f69f45..5b2f6de 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -47,6 +47,8 @@
#include <hidlmemory/mapping.h>
+#include <vector>
+
static const OMX_U32 kPortIndexInput = 0;
static const OMX_U32 kPortIndexOutput = 1;
@@ -493,9 +495,7 @@
case OMX_StateLoaded:
{
- if (mActiveBuffers.size() > 0) {
- freeActiveBuffers();
- }
+ freeActiveBuffers();
FALLTHROUGH_INTENDED;
}
case OMX_StateInvalid:
@@ -2430,11 +2430,19 @@
}
void OMXNodeInstance::freeActiveBuffers() {
- // Make sure to count down here, as freeBuffer will in turn remove
- // the active buffer from the vector...
- for (size_t i = mActiveBuffers.size(); i > 0;) {
- i--;
- freeBuffer(mActiveBuffers[i].mPortIndex, mActiveBuffers[i].mID);
+ std::vector<OMX_U32> portIndices;
+ std::vector<IOMX::buffer_id> bufferIds;
+ {
+ // Access to mActiveBuffers must be protected by mLock.
+ Mutex::Autolock _l(mLock);
+ for (const ActiveBuffer& activeBuffer : mActiveBuffers) {
+ portIndices.emplace_back(activeBuffer.mPortIndex);
+ bufferIds.emplace_back(activeBuffer.mID);
+ }
+ }
+ for (size_t i = bufferIds.size(); i > 0; ) {
+ --i;
+ freeBuffer(portIndices[i], bufferIds[i]);
}
}
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index d905b8d..a232150 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -21,6 +21,7 @@
#include <android-base/logging.h>
#include <android-base/macros.h>
+#include <android-base/properties.h>
#include <utils/Log.h>
#include <media/stagefright/MediaErrors.h>
@@ -38,8 +39,6 @@
namespace android {
-using MCXP = MediaCodecsXmlParser;
-
namespace {
bool fileExists(const std::string &path) {
@@ -118,8 +117,8 @@
}
}
-MCXP::StringSet parseCommaSeparatedStringSet(const char *s) {
- MCXP::StringSet result;
+MediaCodecsXmlParser::StringSet parseCommaSeparatedStringSet(const char *s) {
+ MediaCodecsXmlParser::StringSet result;
for (const char *ptr = s ? : ""; *ptr; ) {
const char *end = strchrnul(ptr, ',');
if (ptr != end) { // skip empty values
@@ -136,6 +135,23 @@
} // unnamed namespace
+std::vector<std::string> MediaCodecsXmlParser::getDefaultXmlNames() {
+ static constexpr char const* prefixes[] = {
+ "media_codecs",
+ "media_codecs_performance"
+ };
+ static std::vector<std::string> variants = {
+ android::base::GetProperty("ro.media.xml_variant.codecs", ""),
+ android::base::GetProperty("ro.media.xml_variant.codecs_performance", "")
+ };
+ static std::vector<std::string> names = {
+ prefixes[0] + variants[0] + ".xml",
+ prefixes[1] + variants[1] + ".xml"
+ };
+ return names;
+}
+
+
struct MediaCodecsXmlParser::Impl {
// status + error message
struct Result {
diff --git a/media/libstagefright/xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h b/media/libstagefright/xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h
index b666de4..e224452 100644
--- a/media/libstagefright/xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h
+++ b/media/libstagefright/xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h
@@ -33,13 +33,17 @@
class MediaCodecsXmlParser {
public:
- // Treblized media codec list will be located in /odm/etc or /vendor/etc.
+ // Treblized media codec list will be located in /product/etc, /odm/etc or
+ // /vendor/etc.
static std::vector<std::string> getDefaultSearchDirs() {
- return { "/odm/etc", "/vendor/etc", "/etc" };
+ return { "/product/etc",
+ "/odm/etc",
+ "/vendor/etc",
+ "/system/etc" };
}
- static std::vector<std::string> getDefaultXmlNames() {
- return { "media_codecs.xml", "media_codecs_performance.xml" };
- }
+
+ static std::vector<std::string> getDefaultXmlNames();
+
static constexpr char const* defaultProfilingResultsXmlPath =
"/data/misc/media/media_codecs_profiling_results.xml";
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 74a09d1..ecda56b 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -3375,6 +3375,7 @@
const AudioDeviceTypeAddr& device,
const String16& opPackageName,
pid_t pid,
+ bool probe,
status_t *status,
int *id,
int *enabled)
@@ -3490,10 +3491,10 @@
if (sessionId == AUDIO_SESSION_DEVICE) {
sp<Client> client = registerPid(pid);
- ALOGV("%s device type %d address %s", __func__, device.mType, device.getAddress());
+ ALOGV("%s device type %#x address %s", __func__, device.mType, device.getAddress());
handle = mDeviceEffectManager.createEffect_l(
&desc, device, client, effectClient, mPatchPanel.patches_l(),
- enabled, &lStatus);
+ enabled, &lStatus, probe);
if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
// remove local strong reference to Client with mClientLock held
Mutex::Autolock _cl(mClientLock);
@@ -3588,7 +3589,7 @@
// create effect on selected output thread
bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
handle = thread->createEffect_l(client, effectClient, priority, sessionId,
- &desc, enabled, &lStatus, pinned);
+ &desc, enabled, &lStatus, pinned, probe);
if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
// remove local strong reference to Client with mClientLock held
Mutex::Autolock _cl(mClientLock);
@@ -3600,7 +3601,7 @@
}
Register:
- if (lStatus == NO_ERROR || lStatus == ALREADY_EXISTS) {
+ if (!probe && (lStatus == NO_ERROR || lStatus == ALREADY_EXISTS)) {
// Check CPU and memory usage
sp<EffectBase> effect = handle->effect().promote();
if (effect != nullptr) {
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 6d7bf3c..40519b0 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -238,6 +238,7 @@
const AudioDeviceTypeAddr& device,
const String16& opPackageName,
pid_t pid,
+ bool probe,
status_t *status /*non-NULL*/,
int *id,
int *enabled);
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 87a4c6e..a3c3b84 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -74,13 +74,14 @@
const sp<IEffectClient>& effectClient,
const std::map<audio_patch_handle_t, PatchPanel::Patch>& patches,
int *enabled,
- status_t *status) {
+ status_t *status,
+ bool probe) {
sp<DeviceEffectProxy> effect;
sp<EffectHandle> handle;
status_t lStatus;
lStatus = checkEffectCompatibility(descriptor);
- if (lStatus != NO_ERROR) {
+ if (probe || lStatus != NO_ERROR) {
*status = lStatus;
return handle;
}
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 14ff14d..81e6065 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -36,7 +36,8 @@
const sp<IEffectClient>& effectClient,
const std::map<audio_patch_handle_t, PatchPanel::Patch>& patches,
int *enabled,
- status_t *status);
+ status_t *status,
+ bool probe);
void createAudioPatch(audio_patch_handle_t handle, const PatchPanel::Patch& patch);
void releaseAudioPatch(audio_patch_handle_t handle);
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 82b9c96..8a65122 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -3047,7 +3047,7 @@
int enabled;
*handle = thread->createEffect_l(nullptr, nullptr, 0, AUDIO_SESSION_DEVICE,
const_cast<effect_descriptor_t *>(&mDescriptor),
- &enabled, &status, false);
+ &enabled, &status, false, false /*probe*/);
ALOGV("%s thread->createEffect_l status %d", __func__, status);
} else {
status = BAD_VALUE;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index f24dcd7..65cf96c 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1365,7 +1365,8 @@
effect_descriptor_t *desc,
int *enabled,
status_t *status,
- bool pinned)
+ bool pinned,
+ bool probe)
{
sp<EffectModule> effect;
sp<EffectHandle> handle;
@@ -1387,7 +1388,7 @@
Mutex::Autolock _l(mLock);
lStatus = checkEffectCompatibility_l(desc, sessionId);
- if (lStatus != NO_ERROR) {
+ if (probe || lStatus != NO_ERROR) {
goto Exit;
}
@@ -1433,7 +1434,7 @@
}
Exit:
- if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
+ if (!probe && lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
Mutex::Autolock _l(mLock);
if (effectCreated) {
chain->removeEffect_l(effect);
@@ -2012,6 +2013,7 @@
void AudioFlinger::PlaybackThread::dumpInternals_l(int fd, const Vector<String16>& args __unused)
{
+ dprintf(fd, " Master volume: %f\n", mMasterVolume);
dprintf(fd, " Master mute: %s\n", mMasterMute ? "on" : "off");
if (mHapticChannelMask != AUDIO_CHANNEL_NONE) {
dprintf(fd, " Haptic channel mask: %#x (%s)\n", mHapticChannelMask,
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 153cf7c..8149e95 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -342,7 +342,8 @@
effect_descriptor_t *desc,
int *enabled,
status_t *status /*non-NULL*/,
- bool pinned);
+ bool pinned,
+ bool probe);
// return values for hasAudioSession (bit field)
enum effect_state {
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 1edfbf9..8f20685 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -59,6 +59,10 @@
return res;
}
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ mCompositeStreamMap.valueAt(i)->switchToOffline();
+ }
+
return OK;
}
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 354eaf9..b47ee2e 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -200,5 +200,10 @@
}
}
+void CompositeStream::switchToOffline() {
+ Mutex::Autolock l(mMutex);
+ mDevice.clear();
+}
+
}; // namespace camera3
}; // namespace android
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index de894f3..e5baf1a 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -48,6 +48,9 @@
status_t deleteStream();
+ // Switch to offline mode and release any online resources.
+ void switchToOffline();
+
// Create and register all internal camera streams.
virtual status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index acad8c6..16ce52c 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -619,14 +619,15 @@
strerror(-ret), ret);
}
- sp<CameraDeviceBase> device = mDevice.promote();
- if (!device.get()) {
- ALOGE("%s: Invalid camera device!", __FUNCTION__);
- return NO_INIT;
- }
-
if (mDepthStreamId >= 0) {
- ret = device->deleteStream(mDepthStreamId);
+ // Camera devices may not be valid after switching to offline mode.
+ // In this case, all offline streams including internal composite streams
+ // are managed and released by the offline session.
+ sp<CameraDeviceBase> device = mDevice.promote();
+ if (device.get() != nullptr) {
+ ret = device->deleteStream(mDepthStreamId);
+ }
+
mDepthStreamId = -1;
}
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index d25e467..f335c20 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -209,13 +209,14 @@
deinitCodec();
if (mAppSegmentStreamId >= 0) {
+ // Camera devices may not be valid after switching to offline mode.
+ // In this case, all offline streams including internal composite streams
+ // are managed and released by the offline session.
sp<CameraDeviceBase> device = mDevice.promote();
- if (!device.get()) {
- ALOGE("%s: Invalid camera device!", __FUNCTION__);
- return NO_INIT;
+ if (device.get() != nullptr) {
+ res = device->deleteStream(mAppSegmentStreamId);
}
- res = device->deleteStream(mAppSegmentStreamId);
mAppSegmentStreamId = -1;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index f29431c..87bdef6 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -4758,6 +4758,11 @@
mStreamIdsToBeDrained = streamIds;
}
+void Camera3Device::RequestThread::clearPreviousRequest() {
+ Mutex::Autolock l(mRequestLock);
+ mPrevRequest.clear();
+}
+
status_t Camera3Device::RequestThread::switchToOffline(
const std::vector<int32_t>& streamsToKeep,
/*out*/hardware::camera::device::V3_6::CameraOfflineSessionInfo* offlineSessionInfo,
@@ -5917,6 +5922,7 @@
internalUpdateStatusLocked(STATUS_UNCONFIGURED);
mOperatingMode = NO_MODE;
mIsConstrainedHighSpeedConfiguration = false;
+ mRequestThread->clearPreviousRequest();
return OK;
// TO be done by CameraDeviceClient/Camera3OfflineSession
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 0069fb3..b373a64 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -837,6 +837,8 @@
/*out*/sp<hardware::camera::device::V3_6::ICameraOfflineSession>* offlineSession,
/*out*/camera3::BufferRecords* bufferRecords);
+ void clearPreviousRequest();
+
status_t setRotateAndCropAutoBehavior(
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);