Merge "MediaMetrics: Add AudioAnalytics actions"
diff --git a/apex/ld.config.txt b/apex/ld.config.txt
index 1c44e65..713f0b7 100644
--- a/apex/ld.config.txt
+++ b/apex/ld.config.txt
@@ -22,6 +22,12 @@
 namespace.default.search.paths      = /apex/com.android.media.swcodec/${LIB}
 namespace.default.asan.search.paths = /apex/com.android.media.swcodec/${LIB}
 
+# The following lines are required to access libs in APEXes that are
+# actually symlinks to files under /system/lib. The symlinks exist for
+# bundled APEXes to reduce space.
+namespace.default.permitted.paths   = /system/${LIB}
+namespace.default.asan.permitted.paths = /system/${LIB}
+
 namespace.default.links = platform
 
 # TODO: replace the following when apex has a way to auto-generate this list
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 49783db..6fefa41 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -8380,12 +8380,16 @@
 // ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
 typedef enum acamera_metadata_enum_acamera_sensor_info_timestamp_source {
     /**
-     * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in nanoseconds and monotonic,
-     * but can not be compared to timestamps from other subsystems
-     * (e.g. accelerometer, gyro etc.), or other instances of the same or different
-     * camera devices in the same system. Timestamps between streams and results for
-     * a single camera instance are comparable, and the timestamps for all buffers
-     * and the result metadata generated by a single capture are identical.</p>
+     * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in nanoseconds and monotonic, but they
+     * cannot be accurately compared to timestamps from other subsystems (e.g. accelerometer,
+     * gyro etc.), or from other instances of the same or different camera devices in the same
+     * system. However, the timestamps are roughly in the same timebase as
+     * <a href="https://developer.android.com/reference/android/os/SystemClock.html#uptimeMillis">SystemClock#uptimeMillis</a>. The accuracy is sufficient at least for tasks
+     * like A/V synchronization for video recording, and the timestamps can be used directly
+     * together with timestamps from the audio subsystem for that task.</p>
+     * <p>Timestamps between streams and results for a single camera instance are comparable,
+     * and the timestamps for all buffers and the result metadata generated by a single
+     * capture are identical.</p>
      *
      * @see ACAMERA_SENSOR_TIMESTAMP
      */
@@ -8395,6 +8399,14 @@
      * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in the same timebase as
      * <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a>,
      * and they can be compared to other timestamps using that base.</p>
+     * <p>When buffers from a REALTIME device are passed directly to a video encoder from the
+     * camera, automatic compensation is done to account for differing timebases of the
+     * audio and camera subsystems.  If the application is receiving buffers and then later
+     * sending them to a video encoder or other application where they are compared with
+     * audio subsystem timestamps or similar, this compensation is not present.  In those
+     * cases, applications need to adjust the timestamps themselves.  Since <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a> and <a href="https://developer.android.com/reference/android/os/SystemClock.html#uptimeMillis">SystemClock#uptimeMillis</a> only diverge while the device is asleep, an
+     * offset between the two sources can be measured once per active session and applied
+     * to timestamps for sufficient accuracy for A/V sync.</p>
      *
      * @see ACAMERA_SENSOR_TIMESTAMP
      */
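
The paragraph above recommends measuring the offset between the two clock sources once per active session. A minimal native sketch of that adjustment, assuming CLOCK_BOOTTIME and CLOCK_MONOTONIC as the underlying clocks for SystemClock#elapsedRealtimeNanos and SystemClock#uptimeMillis respectively (helper names here are hypothetical):

```cpp
#include <stdint.h>
#include <time.h>

// Read a POSIX clock in nanoseconds.
static int64_t nowNs(clockid_t clock) {
    struct timespec ts;
    clock_gettime(clock, &ts);
    return int64_t(ts.tv_sec) * 1000000000LL + ts.tv_nsec;
}

// Measure once per active session: CLOCK_BOOTTIME tracks
// SystemClock#elapsedRealtimeNanos, CLOCK_MONOTONIC tracks
// SystemClock#uptimeMillis; they only diverge while the device is asleep.
static int64_t measureTimebaseOffsetNs() {
    return nowNs(CLOCK_BOOTTIME) - nowNs(CLOCK_MONOTONIC);
}

// Shift a REALTIME camera timestamp into the uptime-based timebase
// before comparing it with audio subsystem timestamps.
static int64_t cameraToUptimeTimebaseNs(int64_t cameraTimestampNs, int64_t offsetNs) {
    return cameraTimestampNs - offsetNs;
}
```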
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index b534f8a..c66dea2 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -84,14 +84,13 @@
 using android::sp;
 using android::status_t;
 
-using android::DISPLAY_ORIENTATION_0;
-using android::DISPLAY_ORIENTATION_180;
-using android::DISPLAY_ORIENTATION_90;
 using android::INVALID_OPERATION;
 using android::NAME_NOT_FOUND;
 using android::NO_ERROR;
 using android::UNKNOWN_ERROR;
 
+namespace ui = android::ui;
+
 static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
 static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
 static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
@@ -328,7 +327,7 @@
     }
 
     t.setDisplayProjection(dpy,
-            gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
+            gRotate ? ui::ROTATION_90 : ui::ROTATION_0,
             layerStackRect, displayRect);
     return NO_ERROR;
 }
@@ -414,7 +413,7 @@
  */
 static status_t runEncoder(const sp<MediaCodec>& encoder,
         AMediaMuxer *muxer, FILE* rawFp, const sp<IBinder>& display,
-        const sp<IBinder>& virtualDpy, uint8_t orientation) {
+        const sp<IBinder>& virtualDpy, ui::Rotation orientation) {
     static int kTimeout = 250000;   // be responsive on signal
     status_t err;
     ssize_t trackIdx = -1;
@@ -484,7 +483,7 @@
                     if (err != NO_ERROR) {
                         ALOGW("getDisplayInfo(main) failed: %d", err);
                     } else if (orientation != displayInfo.orientation) {
-                        ALOGD("orientation changed, now %d", displayInfo.orientation);
+                        ALOGD("orientation changed, now %s", toCString(displayInfo.orientation));
                         SurfaceComposerClient::Transaction t;
                         setDisplayProjection(t, virtualDpy, displayInfo);
                         t.apply();
@@ -691,9 +690,9 @@
     }
 
     if (gVerbose) {
-        printf("Display is %dx%d @%.2ffps (orientation=%u), layerStack=%u\n",
+        printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
                 displayInfo.viewportW, displayInfo.viewportH, displayInfo.fps,
-                displayInfo.orientation, displayInfo.layerStack);
+                toCString(displayInfo.orientation), displayInfo.layerStack);
         fflush(stdout);
     }
 
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index defc94f..7b447d3 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -23,6 +23,7 @@
 
 LOCAL_MODULE_TAGS := optional
 
+LOCAL_SYSTEM_EXT_MODULE:= true
 LOCAL_MODULE:= stagefright
 
 include $(BUILD_EXECUTABLE)
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index bb910ad..56813c4 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -501,7 +501,7 @@
 status_t C2SoftAvcDec::initDecoder() {
     if (OK != createDecoder()) return UNKNOWN_ERROR;
     mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
-    mStride = ALIGN64(mWidth);
+    mStride = ALIGN128(mWidth);
     mSignalledError = false;
     resetPlugin();
     (void) setNumCores();
@@ -909,7 +909,7 @@
         if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+                setParams(ALIGN128(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
             }
             if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
                 mWidth = s_decode_op.u4_pic_wd;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 389ea61..6db4387 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -497,7 +497,7 @@
 status_t C2SoftHevcDec::initDecoder() {
     if (OK != createDecoder()) return UNKNOWN_ERROR;
     mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
-    mStride = ALIGN64(mWidth);
+    mStride = ALIGN128(mWidth);
     mSignalledError = false;
     resetPlugin();
     (void) setNumCores();
@@ -904,7 +904,7 @@
         if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+                setParams(ALIGN128(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
             }
             if (s_decode_op.u4_pic_wd != mWidth ||  s_decode_op.u4_pic_ht != mHeight) {
                 mWidth = s_decode_op.u4_pic_wd;
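
Both decoder changes widen the stride alignment from 64 to 128 pixels. A sketch of what such an alignment helper computes, assuming ALIGN128 mirrors the existing ALIGN64 macro in these components (the real definitions live in the decoder headers):

```cpp
// Hypothetical stand-ins for the decoders' alignment macros:
// round x up to the next multiple of 64 or 128.
#define ALIGN64(x)  ((((x) + 63) >> 6) << 6)
#define ALIGN128(x) ((((x) + 127) >> 7) << 7)

int main() {
    // A 1080-pixel-wide frame gets a 1152-pixel stride with 128 alignment,
    // versus 1088 with the previous 64 alignment.
    static_assert(ALIGN64(1080) == 1088, "64 alignment");
    static_assert(ALIGN128(1080) == 1152, "128 alignment");
    return 0;
}
```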
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 941cf54..0742091 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -1017,6 +1017,16 @@
     return aps->getDevicesForStream(stream);
 }
 
+status_t AudioSystem::getDevicesForAttributes(const AudioAttributes &aa,
+                                              AudioDeviceTypeAddrVector *devices) {
+    if (devices == nullptr) {
+        return BAD_VALUE;
+    }
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+    if (aps == 0) return PERMISSION_DENIED;
+    return aps->getDevicesForAttributes(aa, devices);
+}
+
 audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t *desc)
 {
     const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
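
For context, a hedged sketch of how a system-side caller might use the new AudioSystem entry point; the AudioAttributes construction and the AudioDeviceTypeAddr field/accessor names are assumptions based on the headers touched in this change, and the underlying binder call is restricted to service UIDs (see the IAudioPolicyService check further below):

```cpp
#define LOG_TAG "DevicesForAttributesExample"
#include <media/AudioSystem.h>
#include <system/audio.h>
#include <utils/Log.h>

using namespace android;

// Hypothetical helper: log the output devices selected for media playback.
status_t logMediaOutputDevices() {
    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
    attr.usage = AUDIO_USAGE_MEDIA;

    AudioDeviceTypeAddrVector devices;
    // Assumed: AudioAttributes is constructible from an audio_attributes_t.
    status_t status = AudioSystem::getDevicesForAttributes(AudioAttributes(attr), &devices);
    if (status != NO_ERROR) {
        ALOGW("getDevicesForAttributes failed: %d", status);
        return status;
    }
    for (const auto& device : devices) {
        // Assumed member/accessor names on AudioDeviceTypeAddr.
        ALOGI("device type 0x%x, address \"%s\"", device.mType, device.getAddress());
    }
    return NO_ERROR;
}
```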
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index b9e6e33..87802a1 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -109,6 +109,7 @@
     SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
     REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
     GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY,
+    GET_DEVICES_FOR_ATTRIBUTES,
 };
 
 #define MAX_ITEMS_PER_LIST 1024
@@ -1348,6 +1349,41 @@
         }
         return static_cast<status_t>(reply.readInt32());
     }
+
+    virtual status_t getDevicesForAttributes(const AudioAttributes &aa,
+            AudioDeviceTypeAddrVector *devices) const
+    {
+        if (devices == nullptr) {
+            return BAD_VALUE;
+        }
+        Parcel data, reply;
+        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+        status_t status = aa.writeToParcel(&data);
+        if (status != NO_ERROR) {
+            return status;
+        }
+        status = remote()->transact(GET_DEVICES_FOR_ATTRIBUTES, data, &reply);
+        if (status != NO_ERROR) {
+            // transaction failed, return error
+            return status;
+        }
+        status = static_cast<status_t>(reply.readInt32());
+        if (status != NO_ERROR) {
+            // APM method call failed, return error
+            return status;
+        }
+
+        const size_t numberOfDevices = (size_t)reply.readInt32();
+        for (size_t i = 0; i < numberOfDevices; i++) {
+            AudioDeviceTypeAddr device;
+            if (device.readFromParcel((Parcel*)&reply) == NO_ERROR) {
+                devices->push_back(device);
+            } else {
+                return FAILED_TRANSACTION;
+            }
+        }
+        return NO_ERROR;
+    }
 };
 
 IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
@@ -1414,7 +1450,8 @@
         case IS_CALL_SCREEN_MODE_SUPPORTED:
         case SET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
         case REMOVE_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
-        case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY: {
+        case GET_PREFERRED_DEVICE_FOR_PRODUCT_STRATEGY:
+        case GET_DEVICES_FOR_ATTRIBUTES: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
                       __func__, code, IPCThreadState::self()->getCallingPid(),
@@ -2473,6 +2510,37 @@
             return NO_ERROR;
         }
 
+        case GET_DEVICES_FOR_ATTRIBUTES: {
+            CHECK_INTERFACE(IAudioPolicyService, data, reply);
+            AudioAttributes attributes;
+            status_t status = attributes.readFromParcel(&data);
+            if (status != NO_ERROR) {
+                return status;
+            }
+            AudioDeviceTypeAddrVector devices;
+            status = getDevicesForAttributes(attributes.getAttributes(), &devices);
+            // reply data formatted as:
+            //  - (int32) method call result from APM
+            //  - (int32) number of devices (n) if method call returned NO_ERROR
+            //  - n AudioDeviceTypeAddr         if method call returned NO_ERROR
+            reply->writeInt32(status);
+            if (status != NO_ERROR) {
+                return NO_ERROR;
+            }
+            status = reply->writeInt32(devices.size());
+            if (status != NO_ERROR) {
+                return status;
+            }
+            for (const auto& device : devices) {
+                status = device.writeToParcel(reply);
+                if (status != NO_ERROR) {
+                    return status;
+                }
+            }
+
+            return NO_ERROR;
+        }
+
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index c4b528e..ae8b59c 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -279,6 +279,8 @@
 
     static uint32_t getStrategyForStream(audio_stream_type_t stream);
     static audio_devices_t getDevicesForStream(audio_stream_type_t stream);
+    static status_t getDevicesForAttributes(const AudioAttributes &aa,
+                                            AudioDeviceTypeAddrVector *devices);
 
     static audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc);
     static status_t registerEffect(const effect_descriptor_t *desc,
diff --git a/media/libaudioclient/include/media/IAudioPolicyService.h b/media/libaudioclient/include/media/IAudioPolicyService.h
index 6623061..742762d 100644
--- a/media/libaudioclient/include/media/IAudioPolicyService.h
+++ b/media/libaudioclient/include/media/IAudioPolicyService.h
@@ -108,6 +108,8 @@
 
     virtual uint32_t getStrategyForStream(audio_stream_type_t stream) = 0;
     virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream) = 0;
+    virtual status_t getDevicesForAttributes(const AudioAttributes &aa,
+            AudioDeviceTypeAddrVector *devices) const = 0;
     virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc) = 0;
     virtual status_t registerEffect(const effect_descriptor_t *desc,
                                     audio_io_handle_t io,
diff --git a/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml b/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml
index 89d6ce2..1179d6c 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml
+++ b/media/tests/benchmark/MediaBenchmarkTest/AndroidTest.xml
@@ -15,6 +15,7 @@
 -->
 <configuration description="Runs Media Benchmark Tests">
     <target_preparer class="com.android.tradefed.targetprep.TestAppInstallSetup">
+        <option name="cleanup-apks" value="false" />
         <option name="test-file-name" value="MediaBenchmarkTest.apk" />
     </target_preparer>
 
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java
index 07d414d..c41f198 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java
@@ -57,7 +57,7 @@
     private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
     private static final String mOutputFilePath = mContext.getString(R.string.output_file_path);
     private static final String mStatsFile =
-            mContext.getFilesDir() + "/Decoder." + System.currentTimeMillis() + ".csv";
+            mContext.getExternalFilesDir(null) + "/Decoder." + System.currentTimeMillis() + ".csv";
     private static final String TAG = "DecoderTest";
     private static final long PER_TEST_TIMEOUT_MS = 60000;
     private static final boolean DEBUG = false;
@@ -114,6 +114,7 @@
         Stats mStats = new Stats();
         boolean status = mStats.writeStatsHeader(mStatsFile);
         assertTrue("Unable to open stats file for writing!", status);
+        Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
     }
 
     @Test(timeout = PER_TEST_TIMEOUT_MS)
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
index 00e5e21..831467a 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
@@ -57,7 +57,7 @@
     private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
     private static final String mOutputFilePath = mContext.getString(R.string.output_file_path);
     private static final String mStatsFile =
-            mContext.getFilesDir() + "/Encoder." + System.currentTimeMillis() + ".csv";
+            mContext.getExternalFilesDir(null) + "/Encoder." + System.currentTimeMillis() + ".csv";
     private static final String TAG = "EncoderTest";
     private static final long PER_TEST_TIMEOUT_MS = 120000;
     private static final boolean DEBUG = false;
@@ -94,6 +94,7 @@
         Stats mStats = new Stats();
         boolean status = mStats.writeStatsHeader(mStatsFile);
         assertTrue("Unable to open stats file for writing!", status);
+        Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
     }
 
     @Test(timeout = PER_TEST_TIMEOUT_MS)
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/ExtractorTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/ExtractorTest.java
index a33ecfe..6b7aad1 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/ExtractorTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/ExtractorTest.java
@@ -50,8 +50,8 @@
     private static Context mContext =
             InstrumentationRegistry.getInstrumentation().getTargetContext();
     private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
-    private static final String mStatsFile =
-            mContext.getFilesDir() + "/Extractor." + System.currentTimeMillis() + ".csv";
+    private static final String mStatsFile = mContext.getExternalFilesDir(null) + "/Extractor."
+            + System.currentTimeMillis() + ".csv";
     private static final String TAG = "ExtractorTest";
     private String mInputFileName;
     private int mTrackId;
@@ -84,6 +84,7 @@
         Stats mStats = new Stats();
         boolean status = mStats.writeStatsHeader(mStatsFile);
         assertTrue("Unable to open stats file for writing!", status);
+        Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
     }
 
     @Test
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
index b69c57b..2efdba2 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
@@ -58,7 +58,7 @@
             InstrumentationRegistry.getInstrumentation().getTargetContext();
     private static final String mInputFilePath = mContext.getString(R.string.input_file_path);
     private static final String mStatsFile =
-            mContext.getFilesDir() + "/Muxer." + System.currentTimeMillis() + ".csv";
+            mContext.getExternalFilesDir(null) + "/Muxer." + System.currentTimeMillis() + ".csv";
     private static final String TAG = "MuxerTest";
     private static final Map<String, Integer> mMapFormat = new Hashtable<String, Integer>() {
         {
@@ -106,6 +106,7 @@
         Stats mStats = new Stats();
         boolean status = mStats.writeStatsHeader(mStatsFile);
         assertTrue("Unable to open stats file for writing!", status);
+        Log.d(TAG, "Saving Benchmark results in: " + mStatsFile);
     }
 
     @Test
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index c5cdc25..dd0cd9b 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -192,6 +192,10 @@
     // return the enabled output devices for the given stream type
     virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream) = 0;
 
+    // retrieves the list of enabled output devices for the given audio attributes
+    virtual status_t getDevicesForAttributes(const audio_attributes_t &attr,
+                                             AudioDeviceTypeAddrVector *devices) = 0;
+
     // Audio effect management
     virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc) = 0;
     virtual status_t registerEffect(const effect_descriptor_t *desc,
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 20c0a24..b1103ab 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -197,7 +197,9 @@
                 ALOGV("%s: Mix %zu ignored as secondaryOutput because not opened yet", __func__, i);
             } else {
                 ALOGV("%s: Add a secondary desc %zu", __func__, i);
-                secondaryDescs->push_back(policyDesc);
+                if (secondaryDescs != nullptr) {
+                    secondaryDescs->push_back(policyDesc);
+                }
             }
         }
     }
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index d1b85c8..75c89aa 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -971,19 +971,21 @@
     if (usePrimaryOutputFromPolicyMixes) {
         *output = policyDesc->mIoHandle;
         sp<AudioPolicyMix> mix = policyDesc->mPolicyMix.promote();
-        sp<DeviceDescriptor> deviceDesc =
-                mAvailableOutputDevices.getDevice(mix->mDeviceType,
-                                                  mix->mDeviceAddress,
-                                                  AUDIO_FORMAT_DEFAULT);
-        *selectedDeviceId = deviceDesc != 0 ? deviceDesc->getId() : AUDIO_PORT_HANDLE_NONE;
+        if (mix != nullptr) {
+            sp<DeviceDescriptor> deviceDesc =
+                    mAvailableOutputDevices.getDevice(mix->mDeviceType,
+                                                      mix->mDeviceAddress,
+                                                      AUDIO_FORMAT_DEFAULT);
+            *selectedDeviceId = deviceDesc != 0 ? deviceDesc->getId() : AUDIO_PORT_HANDLE_NONE;
 
-        ALOGV("getOutputForAttr() returns output %d", *output);
-        if (resultAttr->usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
-            *outputType = API_OUT_MIX_PLAYBACK;
-        } else {
-            *outputType = API_OUTPUT_LEGACY;
+            ALOGV("getOutputForAttr() returns output %d", *output);
+            if (resultAttr->usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
+                *outputType = API_OUT_MIX_PLAYBACK;
+            } else {
+                *outputType = API_OUTPUT_LEGACY;
+            }
+            return NO_ERROR;
         }
-        return NO_ERROR;
     }
     // Virtual sources must always be dynamically or explicitly routed
     if (resultAttr->usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
@@ -1646,7 +1648,7 @@
     DeviceVector devices;
     sp<AudioPolicyMix> policyMix = outputDesc->mPolicyMix.promote();
     const char *address = NULL;
-    if (policyMix != NULL) {
+    if (policyMix != nullptr) {
         audio_devices_t newDeviceType;
         address = policyMix->mDeviceAddress.string();
         if ((policyMix->mRouteFlags & MIX_ROUTE_FLAG_LOOP_BACK) == MIX_ROUTE_FLAG_LOOP_BACK) {
@@ -1828,7 +1830,7 @@
             sp<AudioPolicyMix> policyMix = outputDesc->mPolicyMix.promote();
             if (isSingleDeviceType(
                     outputDesc->devices().types(), &audio_is_remote_submix_device) &&
-                policyMix != NULL &&
+                policyMix != nullptr &&
                 policyMix->mMixType == MIX_TYPE_RECORDERS) {
                 setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX,
                                             AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
@@ -2275,7 +2277,7 @@
     if (status == NO_ERROR && inputDesc->activeCount() == 1) {
         sp<AudioPolicyMix> policyMix = inputDesc->mPolicyMix.promote();
         // if input maps to a dynamic policy with an activity listener, notify of state change
-        if ((policyMix != NULL)
+        if ((policyMix != nullptr)
                 && ((policyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
             mpClientInterface->onDynamicPolicyMixStateUpdate(policyMix->mDeviceAddress,
                     MIX_STATE_MIXING);
@@ -2292,7 +2294,7 @@
         // For remote submix (a virtual device), we open only one input per capture request.
         if (audio_is_remote_submix_device(inputDesc->getDeviceType())) {
             String8 address = String8("");
-            if (policyMix == NULL) {
+            if (policyMix == nullptr) {
                 address = String8("0");
             } else if (policyMix->mMixType == MIX_TYPE_PLAYERS) {
                 address = policyMix->mDeviceAddress;
@@ -2339,7 +2341,7 @@
     } else {
         sp<AudioPolicyMix> policyMix = inputDesc->mPolicyMix.promote();
         // if input maps to a dynamic policy with an activity listener, notify of state change
-        if ((policyMix != NULL)
+        if ((policyMix != nullptr)
                 && ((policyMix->mCbFlags & AudioMix::kCbFlagNotifyActivity) != 0)) {
             mpClientInterface->onDynamicPolicyMixStateUpdate(policyMix->mDeviceAddress,
                     MIX_STATE_IDLE);
@@ -2349,7 +2351,7 @@
         // used by a policy mix of type MIX_TYPE_RECORDERS
         if (audio_is_remote_submix_device(inputDesc->getDeviceType())) {
             String8 address = String8("");
-            if (policyMix == NULL) {
+            if (policyMix == nullptr) {
                 address = String8("0");
             } else if (policyMix->mMixType == MIX_TYPE_PLAYERS) {
                 address = policyMix->mDeviceAddress;
@@ -5437,6 +5439,35 @@
     return deviceTypesToBitMask(devices.types());
 }
 
+status_t AudioPolicyManager::getDevicesForAttributes(
+        const audio_attributes_t &attr, AudioDeviceTypeAddrVector *devices) {
+    if (devices == nullptr) {
+        return BAD_VALUE;
+    }
+    // check dynamic policies but only for primary descriptors (secondary not used for audible
+    // audio routing, only used for duplication for playback capture)
+    sp<SwAudioOutputDescriptor> policyDesc;
+    status_t status = mPolicyMixes.getOutputForAttr(attr, 0 /*uid unknown here*/,
+            AUDIO_OUTPUT_FLAG_NONE, policyDesc, nullptr);
+    if (status != OK) {
+        return status;
+    }
+    if (policyDesc != nullptr) {
+        sp<AudioPolicyMix> mix = policyDesc->mPolicyMix.promote();
+        if (mix != nullptr) {
+            AudioDeviceTypeAddr device(mix->mDeviceType, mix->mDeviceAddress.c_str());
+            devices->push_back(device);
+            return NO_ERROR;
+        }
+    }
+
+    DeviceVector curDevices = mEngine->getOutputDevicesForAttributes(attr, nullptr, false);
+    for (const auto& device : curDevices) {
+        devices->push_back(device->getDeviceTypeAddr());
+    }
+    return NO_ERROR;
+}
+
 void AudioPolicyManager::handleNotificationRoutingForStream(audio_stream_type_t stream) {
     switch(stream) {
     case AUDIO_STREAM_MUSIC:
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index d6c1016..7e0e16f 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -193,6 +193,10 @@
         // return the enabled output devices for the given stream type
         virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream);
 
+        virtual status_t getDevicesForAttributes(
+                const audio_attributes_t &attributes,
+                AudioDeviceTypeAddrVector *devices);
+
         virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc = NULL);
         virtual status_t registerEffect(const effect_descriptor_t *desc,
                                         audio_io_handle_t io,
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index d245231..68a2a8c 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -806,6 +806,17 @@
     return mAudioPolicyManager->getDevicesForStream(stream);
 }
 
+status_t AudioPolicyService::getDevicesForAttributes(const AudioAttributes &aa,
+                                             AudioDeviceTypeAddrVector *devices) const
+{
+    if (mAudioPolicyManager == NULL) {
+        return NO_INIT;
+    }
+    Mutex::Autolock _l(mLock);
+    AutoCallerClear acc;
+    return mAudioPolicyManager->getDevicesForAttributes(aa.getAttributes(), devices);
+}
+
 audio_io_handle_t AudioPolicyService::getOutputForEffect(const effect_descriptor_t *desc)
 {
     // FIXME change return type to status_t, and return NO_INIT here
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 135b3ac..84adcc2 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -127,6 +127,8 @@
 
     virtual uint32_t getStrategyForStream(audio_stream_type_t stream);
     virtual audio_devices_t getDevicesForStream(audio_stream_type_t stream);
+    virtual status_t getDevicesForAttributes(const AudioAttributes &aa,
+                                             AudioDeviceTypeAddrVector *devices) const;
 
     virtual audio_io_handle_t getOutputForEffect(const effect_descriptor_t *desc);
     virtual status_t registerEffect(const effect_descriptor_t *desc,