Merge "Effects: Add debug float logging"
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 07d4e7e..4ced08c 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -79,8 +79,9 @@
      * <p>
      * @param operatingMode The kind of session to create; either NORMAL_MODE or
      *     CONSTRAINED_HIGH_SPEED_MODE. Must be a non-negative value.
+     * @param sessionParams Session-wide camera parameters
      */
-    void endConfigure(int operatingMode);
+    void endConfigure(int operatingMode, in CameraMetadataNative sessionParams);
 
     void deleteStream(int streamId);
 
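The AIDL change above means every binder client now passes a CameraMetadataNative alongside the operating mode. A minimal caller sketch, assuming a client with no session parameters to set (the same empty-metadata pattern appears in the CameraBinderTests change further down in this patch):

    // Sketch only, not part of this patch: an empty metadata object preserves
    // the old endConfigure(int) behavior.
    CameraMetadata sessionParams;  // left empty: no session-wide parameters
    binder::Status res = device->endConfigure(
            /*operatingMode*/ 0 /* NORMAL_MODE */, sessionParams);
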
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index 0597950..983d29b 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -44,6 +44,8 @@
 
     mMetadata.clear();
     mSurfaceList.clear();
+    mStreamIdxList.clear();
+    mSurfaceIdxList.clear();
 
     status_t err = OK;
 
@@ -53,6 +55,13 @@
     }
     ALOGV("%s: Read metadata from parcel", __FUNCTION__);
 
+    int isReprocess = 0;
+    if ((err = parcel->readInt32(&isReprocess)) != OK) {
+        ALOGE("%s: Failed to read reprocessing from parcel", __FUNCTION__);
+        return err;
+    }
+    mIsReprocess = (isReprocess != 0);
+
     int32_t size;
     if ((err = parcel->readInt32(&size)) != OK) {
         ALOGE("%s: Failed to read surface list size from parcel", __FUNCTION__);
@@ -61,7 +70,7 @@
     ALOGV("%s: Read surface list size = %d", __FUNCTION__, size);
 
     // Do not distinguish null arrays from 0-sized arrays.
-    for (int i = 0; i < size; ++i) {
+    for (int32_t i = 0; i < size; ++i) {
         // Parcel.writeParcelableArray
         size_t len;
         const char16_t* className = parcel->readString16Inplace(&len);
@@ -88,12 +97,32 @@
         mSurfaceList.push_back(surface);
     }
 
-    int isReprocess = 0;
-    if ((err = parcel->readInt32(&isReprocess)) != OK) {
-        ALOGE("%s: Failed to read reprocessing from parcel", __FUNCTION__);
+    int32_t streamSurfaceSize;
+    if ((err = parcel->readInt32(&streamSurfaceSize)) != OK) {
+        ALOGE("%s: Failed to read streamSurfaceSize from parcel", __FUNCTION__);
         return err;
     }
-    mIsReprocess = (isReprocess != 0);
+
+    if (streamSurfaceSize < 0) {
+        ALOGE("%s: Bad streamSurfaceSize %d from parcel", __FUNCTION__, streamSurfaceSize);
+        return BAD_VALUE;
+    }
+
+    for (int32_t i = 0; i < streamSurfaceSize; ++i) {
+        int streamIdx;
+        if ((err = parcel->readInt32(&streamIdx)) != OK) {
+            ALOGE("%s: Failed to read stream index from parcel", __FUNCTION__);
+            return err;
+        }
+        mStreamIdxList.push_back(streamIdx);
+
+        int surfaceIdx;
+        if ((err = parcel->readInt32(&surfaceIdx)) != OK) {
+            ALOGE("%s: Failed to read surface index from parcel", __FUNCTION__);
+            return err;
+        }
+        mSurfaceIdxList.push_back(surfaceIdx);
+    }
 
     return OK;
 }
@@ -110,28 +139,43 @@
         return err;
     }
 
-    int32_t size = static_cast<int32_t>(mSurfaceList.size());
+    parcel->writeInt32(mIsReprocess ? 1 : 0);
 
-    // Send 0-sized arrays when it's empty. Do not send null arrays.
-    parcel->writeInt32(size);
+    if (mSurfaceConverted) {
+        parcel->writeInt32(0); // 0-sized array
+    } else {
+        int32_t size = static_cast<int32_t>(mSurfaceList.size());
 
-    for (int32_t i = 0; i < size; ++i) {
-        // not sure if readParcelableArray does this, hard to tell from source
-        parcel->writeString16(String16("android.view.Surface"));
+        // Send 0-sized arrays when it's empty. Do not send null arrays.
+        parcel->writeInt32(size);
 
-        // Surface.writeToParcel
-        view::Surface surfaceShim;
-        surfaceShim.name = String16("unknown_name");
-        surfaceShim.graphicBufferProducer = mSurfaceList[i]->getIGraphicBufferProducer();
-        if ((err = surfaceShim.writeToParcel(parcel)) != OK) {
-            ALOGE("%s: Failed to write output target Surface %d to parcel: %s (%d)",
-                    __FUNCTION__, i, strerror(-err), err);
-            return err;
+        for (int32_t i = 0; i < size; ++i) {
+            // not sure if readParcelableArray does this, hard to tell from source
+            parcel->writeString16(String16("android.view.Surface"));
+
+            // Surface.writeToParcel
+            view::Surface surfaceShim;
+            surfaceShim.name = String16("unknown_name");
+            surfaceShim.graphicBufferProducer = mSurfaceList[i]->getIGraphicBufferProducer();
+            if ((err = surfaceShim.writeToParcel(parcel)) != OK) {
+                ALOGE("%s: Failed to write output target Surface %d to parcel: %s (%d)",
+                        __FUNCTION__, i, strerror(-err), err);
+                return err;
+            }
         }
     }
 
-    parcel->writeInt32(mIsReprocess ? 1 : 0);
-
+    parcel->writeInt32(mStreamIdxList.size());
+    for (size_t i = 0; i < mStreamIdxList.size(); ++i) {
+        if ((err = parcel->writeInt32(mStreamIdxList[i])) != OK) {
+            ALOGE("%s: Failed to write stream index to parcel", __FUNCTION__);
+            return err;
+        }
+        if ((err = parcel->writeInt32(mSurfaceIdxList[i])) != OK) {
+            ALOGE("%s: Failed to write surface index to parcel", __FUNCTION__);
+            return err;
+        }
+    }
     return OK;
 }
 
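For reference, a sketch of the wire layout the rewritten writeToParcel() now produces; readFromParcel() above consumes the same fields in the same order:

    // CaptureRequest parcel layout after this change (summary, not patch code):
    //   1. CameraMetadata                          (mMetadata)
    //   2. int32  isReprocess                      (moved ahead of the surface list)
    //   3. int32  surfaceCount                     (written as 0 when mSurfaceConverted)
    //        per surface: String16 "android.view.Surface" + view::Surface parcelable
    //   4. int32  streamSurfaceSize
    //        per entry: int32 streamIdx, int32 surfaceIdx
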
diff --git a/camera/include/camera/camera2/CaptureRequest.h b/camera/include/camera/camera2/CaptureRequest.h
index e39dfcf..c53799f 100644
--- a/camera/include/camera/camera2/CaptureRequest.h
+++ b/camera/include/camera/camera2/CaptureRequest.h
@@ -41,14 +41,30 @@
     virtual ~CaptureRequest();
 
     CameraMetadata          mMetadata;
+
+    // Used by NDK client to pass surfaces by stream/surface index.
+    bool                    mSurfaceConverted = false;
+
+    // Starting in Android O, creating a Surface from a Parcel takes one extra
+    // IPC call.
     Vector<sp<Surface> >    mSurfaceList;
+    // Optional way of passing the surface list, since passing Surfaces over binder
+    // is expensive. Use the stream/surface index from the current output configuration
+    // to represent a configured output Surface. When stream/surface indices are used,
+    // set mSurfaceList to zero length to save unparcel time.
+    Vector<int>             mStreamIdxList;
+    Vector<int>             mSurfaceIdxList; // per stream surface list index
+
     bool                    mIsReprocess;
+
     void*                   mContext; // arbitrary user context from NDK apps, null for java apps
 
     /**
      * Keep impl up-to-date with CaptureRequest.java in frameworks/base
      */
+    // used by cameraserver to receive CaptureRequest from java/NDK client
     status_t                readFromParcel(const android::Parcel* parcel) override;
+    // used by NDK client to send CaptureRequest to cameraserver
     status_t                writeToParcel(android::Parcel* parcel) const override;
 };
 
diff --git a/camera/ndk/NdkCameraDevice.cpp b/camera/ndk/NdkCameraDevice.cpp
index ac4beea..812a312 100644
--- a/camera/ndk/NdkCameraDevice.cpp
+++ b/camera/ndk/NdkCameraDevice.cpp
@@ -220,5 +220,21 @@
                 __FUNCTION__, device, outputs, callbacks, session);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    return device->createCaptureSession(outputs, callbacks, session);
+    return device->createCaptureSession(outputs, nullptr, callbacks, session);
+}
+
+EXPORT
+camera_status_t ACameraDevice_createCaptureSessionWithSessionParameters(
+        ACameraDevice* device,
+        const ACaptureSessionOutputContainer*       outputs,
+        const ACaptureRequest* sessionParameters,
+        const ACameraCaptureSession_stateCallbacks* callbacks,
+        /*out*/ACameraCaptureSession** session) {
+    ATRACE_CALL();
+    if (device == nullptr || outputs == nullptr || callbacks == nullptr || session == nullptr) {
+        ALOGE("%s: Error: invalid input: device %p, outputs %p, callbacks %p, session %p",
+                __FUNCTION__, device, outputs, callbacks, session);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    return device->createCaptureSession(outputs, sessionParameters, callbacks, session);
 }
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index 6d1d5ce..f60e5fd 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -159,7 +159,7 @@
     dev->lockDeviceForSessionOps();
     {
         Mutex::Autolock _l(mSessionLock);
-        ret = dev->updateOutputConfiguration(output);
+        ret = dev->updateOutputConfigurationLocked(output);
     }
     dev->unlockDevice();
     return ret;
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 45fa28e..f7cea4f 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -157,6 +157,7 @@
 camera_status_t
 CameraDevice::createCaptureSession(
         const ACaptureSessionOutputContainer*       outputs,
+        const ACaptureRequest* sessionParameters,
         const ACameraCaptureSession_stateCallbacks* callbacks,
         /*out*/ACameraCaptureSession** session) {
     sp<ACameraCaptureSession> currentSession = mCurrentSession.promote();
@@ -172,7 +173,7 @@
     }
 
     // Create new session
-    ret = configureStreamsLocked(outputs);
+    ret = configureStreamsLocked(outputs, sessionParameters);
     if (ret != ACAMERA_OK) {
         ALOGE("Fail to create new session. cannot configure streams");
         return ret;
@@ -289,7 +290,7 @@
     return ACAMERA_OK;
 }
 
-camera_status_t CameraDevice::updateOutputConfiguration(ACaptureSessionOutput *output) {
+camera_status_t CameraDevice::updateOutputConfigurationLocked(ACaptureSessionOutput *output) {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         return ret;
@@ -361,6 +362,7 @@
                 return ACAMERA_ERROR_UNKNOWN;
         }
     }
+    mConfiguredOutputs[streamId] = std::make_pair(output->mWindow, outConfig);
 
     return ACAMERA_OK;
 }
@@ -373,6 +375,7 @@
     req->mMetadata = request->settings->getInternalData();
     req->mIsReprocess = false; // NDK does not support reprocessing yet
     req->mContext = request->context;
+    req->mSurfaceConverted = true; // set to true, and fill in stream/surface idx to speed up IPC
 
     for (auto outputTarget : request->targets->mOutputs) {
         ANativeWindow* anw = outputTarget.mWindow;
@@ -383,7 +386,31 @@
             return ret;
         }
         req->mSurfaceList.push_back(surface);
+
+        bool found = false;
+        // lookup stream/surface ID
+        for (const auto& kvPair : mConfiguredOutputs) {
+            int streamId = kvPair.first;
+            const OutputConfiguration& outConfig = kvPair.second.second;
+            const auto& gbps = outConfig.getGraphicBufferProducers();
+            for (int surfaceId = 0; surfaceId < (int) gbps.size(); surfaceId++) {
+                if (gbps[surfaceId] == surface->getIGraphicBufferProducer()) {
+                    found = true;
+                    req->mStreamIdxList.push_back(streamId);
+                    req->mSurfaceIdxList.push_back(surfaceId);
+                    break;
+                }
+            }
+            if (found) {
+                break;
+            }
+        }
+        if (!found) {
+            ALOGE("Unconfigured output target %p in capture request!", anw);
+            return ACAMERA_ERROR_INVALID_PARAMETER; // ret is still ACAMERA_OK here
+        }
     }
+
     outReq = req;
     return ACAMERA_OK;
 }
@@ -434,7 +461,7 @@
     }
 
     // No new session, unconfigure now
-    camera_status_t ret = configureStreamsLocked(nullptr);
+    camera_status_t ret = configureStreamsLocked(nullptr, nullptr);
     if (ret != ACAMERA_OK) {
         ALOGE("Unconfigure stream failed. Device might still be configured! ret %d", ret);
     }
@@ -564,17 +591,11 @@
 CameraDevice::getIGBPfromAnw(
         ANativeWindow* anw,
         sp<IGraphicBufferProducer>& out) {
-    if (anw == nullptr) {
-        ALOGE("Error: output ANativeWindow is null");
-        return ACAMERA_ERROR_INVALID_PARAMETER;
+    sp<Surface> surface;
+    camera_status_t ret = getSurfaceFromANativeWindow(anw, surface);
+    if (ret != ACAMERA_OK) {
+        return ret;
     }
-    int value;
-    int err = (*anw->query)(anw, NATIVE_WINDOW_CONCRETE_TYPE, &value);
-    if (err != OK || value != NATIVE_WINDOW_SURFACE) {
-        ALOGE("Error: ANativeWindow is not backed by Surface!");
-        return ACAMERA_ERROR_INVALID_PARAMETER;
-    }
-    const sp<Surface> surface(static_cast<Surface*>(anw));
     out = surface->getIGraphicBufferProducer();
     return ACAMERA_OK;
 }
@@ -598,7 +619,8 @@
 }
 
 camera_status_t
-CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs) {
+CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
+        const ACaptureRequest* sessionParameters) {
     ACaptureSessionOutputContainer emptyOutput;
     if (outputs == nullptr) {
         outputs = &emptyOutput;
@@ -694,7 +716,11 @@
         mConfiguredOutputs.insert(std::make_pair(streamId, outputPair));
     }
 
-    remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false);
+    CameraMetadata params;
+    if ((sessionParameters != nullptr) && (sessionParameters->settings != nullptr)) {
+        params.append(sessionParameters->settings->getInternalData());
+    }
+    remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params);
     if (remoteRet.serviceSpecificErrorCode() == hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT) {
         ALOGE("Camera device %s cannnot support app output configuration: %s", getId(),
                 remoteRet.toString8().string());
@@ -809,19 +835,26 @@
             setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
             return;
         }
-        ANativeWindow* anw = outputPairIt->second.first;
 
-        ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
-                getId(), anw, frameNumber);
+        const auto& gbps = outputPairIt->second.second.getGraphicBufferProducers();
+        for (const auto& outGbp : gbps) {
+            for (auto surface : request->mSurfaceList) {
+                if (surface->getIGraphicBufferProducer() == outGbp) {
+                    ANativeWindow* anw = static_cast<ANativeWindow*>(surface.get());
+                    ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
+                            getId(), anw, frameNumber);
 
-        sp<AMessage> msg = new AMessage(kWhatCaptureBufferLost, mHandler);
-        msg->setPointer(kContextKey, cbh.mCallbacks.context);
-        msg->setObject(kSessionSpKey, session);
-        msg->setPointer(kCallbackFpKey, (void*) onBufferLost);
-        msg->setObject(kCaptureRequestKey, request);
-        msg->setPointer(kAnwKey, (void*) anw);
-        msg->setInt64(kFrameNumberKey, frameNumber);
-        postSessionMsgAndCleanup(msg);
+                    sp<AMessage> msg = new AMessage(kWhatCaptureBufferLost, mHandler);
+                    msg->setPointer(kContextKey, cbh.mCallbacks.context);
+                    msg->setObject(kSessionSpKey, session);
+                    msg->setPointer(kCallbackFpKey, (void*) onBufferLost);
+                    msg->setObject(kCaptureRequestKey, request);
+                    msg->setPointer(kAnwKey, (void*) anw);
+                    msg->setInt64(kFrameNumberKey, frameNumber);
+                    postSessionMsgAndCleanup(msg);
+                }
+            }
+        }
     } else { // Handle other capture failures
         // Fire capture failure callback if there is one registered
         ACameraCaptureSession_captureCallback_failed onError = cbh.mCallbacks.onCaptureFailed;
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 23cc1a1..1db3dfb 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -60,6 +60,7 @@
 
     camera_status_t createCaptureSession(
             const ACaptureSessionOutputContainer*       outputs,
+            const ACaptureRequest* sessionParameters,
             const ACameraCaptureSession_stateCallbacks* callbacks,
             /*out*/ACameraCaptureSession** session);
 
@@ -123,9 +124,9 @@
             /*out*/int* captureSequenceId,
             bool isRepeating);
 
-    camera_status_t updateOutputConfiguration(ACaptureSessionOutput *output);
+    camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
 
-    static camera_status_t allocateCaptureRequest(
+    camera_status_t allocateCaptureRequest(
             const ACaptureRequest* request, sp<CaptureRequest>& outReq);
 
     static ACaptureRequest* allocateACaptureRequest(sp<CaptureRequest>& req);
@@ -139,7 +140,8 @@
     // For capture session to notify its end of life
     void notifySessionEndOfLifeLocked(ACameraCaptureSession* session);
 
-    camera_status_t configureStreamsLocked(const ACaptureSessionOutputContainer* outputs);
+    camera_status_t configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
+           const ACaptureRequest* sessionParameters);
 
     // Input message will be posted and cleared after this returns
     void postSessionMsgAndCleanup(sp<AMessage>& msg);
@@ -309,9 +311,10 @@
 
     camera_status_t createCaptureSession(
             const ACaptureSessionOutputContainer*       outputs,
+            const ACaptureRequest* sessionParameters,
             const ACameraCaptureSession_stateCallbacks* callbacks,
             /*out*/ACameraCaptureSession** session) {
-        return mDevice->createCaptureSession(outputs, callbacks, session);
+        return mDevice->createCaptureSession(outputs, sessionParameters, callbacks, session);
     }
 
     /***********************
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 7b33c32..29ad09b 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -235,7 +235,7 @@
 }
 
 const CameraMetadata&
-ACameraMetadata::getInternalData() {
+ACameraMetadata::getInternalData() const {
     return mData;
 }
 
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index 143efc7..0fd7efa 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -64,7 +64,7 @@
     void filterUnsupportedFeatures(); // Hide features not yet supported by NDK
     void filterStreamConfigurations(); // Hide input streams, translate hal format to NDK formats
 
-    const CameraMetadata& getInternalData();
+    const CameraMetadata& getInternalData() const;
 
     template<typename INTERNAL_T, typename NDK_T>
     camera_status_t updateImpl(uint32_t tag, uint32_t count, const NDK_T* data) {
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index 3a6f3ef..61deb46 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -720,6 +720,39 @@
 camera_status_t ACaptureSessionSharedOutput_remove(ACaptureSessionOutput *output,
         ANativeWindow* anw);
 
+/**
+ * Create a new camera capture session similar to {@link ACameraDevice_createCaptureSession}. This
+ * function allows clients to pass additional session parameters during session initialization. For
+ * further information about session parameters see {@link ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS}.
+ *
+ * @param device the camera device of interest.
+ * @param outputs the {@link ACaptureSessionOutputContainer} describes all output streams.
+ * @param sessionParameters An optional capture request that contains the initial values of session
+ *                          parameters advertised in
+ *                          {@link ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS}.
+ * @param callbacks the {@link ACameraCaptureSession_stateCallbacks}
+ *                  capture session state callbacks.
+ * @param session the created {@link ACameraCaptureSession} will be filled here if the method call
+ *                succeeds.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the method call succeeds. The created capture session will be
+ *                                filled in session argument.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if any of device, outputs, callbacks or
+ *                                session is NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed.</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error.</li>
+ *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error.
+ *         </li>
+ *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
+ */
+camera_status_t ACameraDevice_createCaptureSessionWithSessionParameters(
+        ACameraDevice* device,
+        const ACaptureSessionOutputContainer* outputs,
+        const ACaptureRequest* sessionParameters,
+        const ACameraCaptureSession_stateCallbacks* callbacks,
+        /*out*/ACameraCaptureSession** session);
+
 #endif /* __ANDROID_API__ >= 28 */
 
 __END_DECLS
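A hedged usage sketch for the new entry point; mDevice, mOutputs and mStateCallbacks are placeholders, not names from this patch. The idea is to build a template request, optionally set keys advertised in ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS, and hand it to session creation:

    ACaptureRequest* sessionParams = nullptr;
    camera_status_t status = ACameraDevice_createCaptureRequest(
            mDevice, TEMPLATE_PREVIEW, &sessionParams);
    if (status == ACAMERA_OK) {
        // Update any keys advertised in ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS here.
        ACameraCaptureSession* session = nullptr;
        status = ACameraDevice_createCaptureSessionWithSessionParameters(
                mDevice, mOutputs, sessionParams, &mStateCallbacks, &session);
        ACaptureRequest_free(sessionParams);
    }
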
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 4f893f1..80d460f 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -2877,6 +2877,59 @@
      */
     ACAMERA_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS =            // int32[n]
             ACAMERA_REQUEST_START + 15,
+    /**
+     * <p>A subset of the available request keys that the camera device
+     * can pass as part of the capture session initialization.</p>
+     *
+     * <p>Type: int32[n]</p>
+     *
+     * <p>This tag may appear in:
+     * <ul>
+     *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+     * </ul></p>
+     *
+     * <p>This is a subset of ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS which
+     * contains a list of keys that are difficult to apply per-frame and
+     * can result in unexpected delays when modified during the capture session
+     * lifetime. Typical examples include parameters that require a
+     * time-consuming hardware re-configuration or internal camera pipeline
+     * change. For performance reasons we advise clients to pass their initial
+     * values as part of
+     * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
+     * Once the camera capture session is enabled, it is also recommended to avoid
+     * changing them from their initial values set in
+     * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
+     * Control over session parameters can still be exerted in capture requests,
+     * but clients should be aware of and expect delays during their application.
+     * An example usage scenario could look like this:</p>
+     * <ul>
+     * <li>The camera client starts by querying the session parameter key list via
+     *   {@link ACameraManager_getCameraCharacteristics }.</li>
+     * <li>Before triggering the capture session create sequence, a capture request
+     *   must be built via
+     *   {@link ACameraDevice_createCaptureRequest }
+     *   using an appropriate template matching the particular use case.</li>
+     * <li>The client should go over the list of session parameters and check
+     *   whether any of the keys listed match the parameters that
+     *   they intend to modify as part of the first capture request.</li>
+     * <li>If there is no such match, the capture request can be passed
+     *   unmodified to
+     *   {@link ACameraDevice_createCaptureSessionWithSessionParameters }.</li>
+     * <li>If matches do exist, the client should update the respective values
+     *   and pass the request to
+     *   {@link ACameraDevice_createCaptureSessionWithSessionParameters }.</li>
+     * <li>After the capture session initialization completes the session parameter
+     *   key list can continue to serve as reference when posting or updating
+     *   further requests. As mentioned above, further changes to session
+     *   parameters should ideally be avoided; if updates are necessary,
+     *   however, clients could expect a delay/glitch during the
+     *   parameter switch.</li>
+     * </ul>
+     *
+     * @see ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS
+     */
+    ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS =                    // int32[n]
+            ACAMERA_REQUEST_START + 16,
     ACAMERA_REQUEST_END,
 
     /**
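A sketch of the query step from the workflow described in the new tag documentation above; mManager and cameraId are placeholders and error handling is elided:

    ACameraMetadata* chars = nullptr;
    ACameraManager_getCameraCharacteristics(mManager, cameraId, &chars);
    ACameraMetadata_const_entry entry;
    camera_status_t status = ACameraMetadata_getConstEntry(
            chars, ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS, &entry);
    if (status == ACAMERA_OK) {
        for (uint32_t i = 0; i < entry.count; i++) {
            int32_t sessionKey = entry.data.i32[i];
            // Keys the app plans to modify in its first request should be set on the
            // ACaptureRequest passed to
            // ACameraDevice_createCaptureSessionWithSessionParameters.
            (void) sessionKey;
        }
    }
    ACameraMetadata_free(chars);
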
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 58d239b..d179aa0 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -10,6 +10,7 @@
     ACameraDevice_close;
     ACameraDevice_createCaptureRequest;
     ACameraDevice_createCaptureSession;
+    ACameraDevice_createCaptureSessionWithSessionParameters;
     ACameraDevice_getId;
     ACameraManager_create;
     ACameraManager_delete;
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 51d9214..8fe9a86 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -476,7 +476,8 @@
         res = device->createStream(output, &streamId);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_LE(0, streamId);
-        res = device->endConfigure(/*isConstrainedHighSpeed*/ false);
+        CameraMetadata sessionParams;
+        res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(callbacks->hadError());
 
@@ -574,7 +575,7 @@
         EXPECT_TRUE(res.isOk()) << res;
         res = device->deleteStream(streamId);
         EXPECT_TRUE(res.isOk()) << res;
-        res = device->endConfigure(/*isConstrainedHighSpeed*/ false);
+        res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams);
         EXPECT_TRUE(res.isOk()) << res;
 
         sleep(/*second*/1); // allow some time for errors to show up, if any
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index c284f73..50fe385 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -631,7 +631,7 @@
         || (channelMask != mInChannelMask)) {
         size_t inBuffSize = af->getInputBufferSize(sampleRate, format, channelMask);
         if (inBuffSize == 0) {
-            ALOGE("AudioSystem::getInputBufferSize failed sampleRate %d format %#x channelMask %x",
+            ALOGE("AudioSystem::getInputBufferSize failed sampleRate %d format %#x channelMask %#x",
                     sampleRate, format, channelMask);
             return BAD_VALUE;
         }
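The remaining audio-side hunks make the same change as this one: hex conversions switch from "%x" to "%#x" so logged values carry an explicit 0x prefix. A one-line illustration, not from the patch:

    printf("%x vs %#x\n", 0x80000004u, 0x80000004u);  // prints "80000004 vs 0x80000004"

(Note that "%#x" still prints a bare 0 for zero values, which is acceptable for these logs.)
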
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index c8fa618..6d829a0 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -2674,13 +2674,13 @@
     String8 result;
 
     result.append(" AudioTrack::dump\n");
-    result.appendFormat("  status(%d), state(%d), session Id(%d), flags(%x)\n",
+    result.appendFormat("  status(%d), state(%d), session Id(%d), flags(%#x)\n",
                         mStatus, mState, mSessionId, mFlags);
     result.appendFormat("  stream type(%d), left - right volume(%f, %f)\n",
                         (mStreamType == AUDIO_STREAM_DEFAULT) ?
                                 audio_attributes_to_stream_type(&mAttributes) : mStreamType,
                         mVolume[AUDIO_INTERLEAVE_LEFT], mVolume[AUDIO_INTERLEAVE_RIGHT]);
-    result.appendFormat("  format(%x), channel mask(%x), channel count(%u)\n",
+    result.appendFormat("  format(%#x), channel mask(%#x), channel count(%u)\n",
                   mFormat, mChannelMask, mChannelCount);
     result.appendFormat("  sample rate(%u), original sample rate(%u), speed(%f)\n",
                   mSampleRate, mOriginalSampleRate, mPlaybackRate.mSpeed);
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 9fef60f..3c975c3 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -2042,8 +2042,8 @@
                                   uint32_t *latencyMs,
                                   audio_output_flags_t flags)
 {
-    ALOGI("openOutput() this %p, module %d Device %x, SamplingRate %d, Format %#08x, Channels %x, "
-              "flags %x",
+    ALOGI("openOutput() this %p, module %d Device %#x, SamplingRate %d, Format %#08x, "
+              "Channels %#x, flags %#x",
               this, module,
               (devices != NULL) ? *devices : 0,
               config->sample_rate,
@@ -2285,8 +2285,8 @@
     sp<StreamInHalInterface> inStream;
     status_t status = inHwHal->openInputStream(
             *input, devices, &halconfig, flags, address.string(), source, &inStream);
-    ALOGV("openInput_l() openInputStream returned input %p, devices %x, SamplingRate %d"
-           ", Format %#x, Channels %x, flags %#x, status %d addr %s",
+    ALOGV("openInput_l() openInputStream returned input %p, devices %#x, SamplingRate %d"
+           ", Format %#x, Channels %#x, flags %#x, status %d addr %s",
             inStream.get(),
             devices,
             halconfig.sample_rate,
diff --git a/services/audioflinger/BufLog.cpp b/services/audioflinger/BufLog.cpp
index 9680eb5..2780290 100644
--- a/services/audioflinger/BufLog.cpp
+++ b/services/audioflinger/BufLog.cpp
@@ -121,7 +121,7 @@
     } else {
         mTag[0] = 0;
     }
-    ALOGV("Creating BufLogStream id:%d tag:%s format:%d ch:%d sr:%d maxbytes:%zu", mId, mTag,
+    ALOGV("Creating BufLogStream id:%d tag:%s format:%#x ch:%d sr:%d maxbytes:%zu", mId, mTag,
             mFormat, mChannels, mSamplingRate, mMaxBytes);
 
     //open file (s), info about tag, format, etc.
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 0601233..3bb5803 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1412,7 +1412,7 @@
     bool chainCreated = false;
 
     ALOGD_IF((mType == OFFLOAD) && !effect->isOffloadable(),
-             "addEffect_l() on offloaded thread %p: effect %s does not support offload flags %x",
+             "addEffect_l() on offloaded thread %p: effect %s does not support offload flags %#x",
                     this, effect->desc().name, effect->desc().flags);
 
     if (chain == 0) {
@@ -3697,7 +3697,7 @@
         // mNormalSink below
 {
     ALOGV("MixerThread() id=%d device=%#x type=%d", id, device, type);
-    ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%u, mFormat=%d, mFrameSize=%zu, "
+    ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%u, mFormat=%#x, mFrameSize=%zu, "
             "mFrameCount=%zu, mNormalFrameCount=%zu",
             mSampleRate, mChannelMask, mChannelCount, mFormat, mFrameSize, mFrameCount,
             mNormalFrameCount);
@@ -3765,7 +3765,7 @@
         NBAIO_Format origformat = format;
 #endif
         // adjust format to match that of the Fast Mixer
-        ALOGV("format changed from %d to %d", format.mFormat, fastMixerFormat);
+        ALOGV("format changed from %#x to %#x", format.mFormat, fastMixerFormat);
         format.mFormat = fastMixerFormat;
         format.mFrameSize = audio_bytes_per_sample(format.mFormat) * format.mChannelCount;
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index ae3dd08..68730a5 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -804,7 +804,8 @@
         flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_HW_AV_SYNC);
     }
 
-    ALOGV("getOutputForAttr() device 0x%x, sampling rate %d, format %x, channel mask %x, flags %x",
+    ALOGV("getOutputForAttr() device 0x%x, sampling rate %d, format %#x, channel mask %#x, "
+          "flags %#x",
           device, config->sample_rate, config->format, config->channel_mask, flags);
 
     *output = getOutputForDevice(device, session, *stream, config, flags);
@@ -972,7 +973,7 @@
         output = selectOutput(outputs, flags, config->format);
     }
     ALOGW_IF((output == 0), "getOutputForDevice() could not find output for stream %d, "
-            "sampling rate %d, format %d, channels %x, flags %x",
+            "sampling rate %d, format %#x, channels %#x, flags %#x",
             stream, config->sample_rate, config->format, config->channel_mask, flags);
 
     return output;
@@ -1401,7 +1402,7 @@
                                              input_type_t *inputType,
                                              audio_port_handle_t *portId)
 {
-    ALOGV("getInputForAttr() source %d, sampling rate %d, format %d, channel mask %x,"
+    ALOGV("getInputForAttr() source %d, sampling rate %d, format %#x, channel mask %#x,"
             "session %d, flags %#x",
           attr->source, config->sample_rate, config->format, config->channel_mask, session, flags);
 
@@ -1705,7 +1706,7 @@
         !audio_formats_match(profileFormat, lConfig.format) ||
         (profileChannelMask != lConfig.channel_mask)) {
         ALOGW("getInputForAttr() failed opening input: sampling rate %d"
-              ", format %d, channel mask %x",
+              ", format %#x, channel mask %#x",
               profileSamplingRate, profileFormat, profileChannelMask);
         if (input != AUDIO_IO_HANDLE_NONE) {
             inputDesc->close();
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 7e0450d..eb8222c 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -25,51 +25,55 @@
     virtual ~AudioPolicyTestClient() = default;
 
     // AudioPolicyClientInterface Implementation
-    audio_module_handle_t loadHwModule(const char* /*name*/) override { return 0; }
+    audio_module_handle_t loadHwModule(const char* /*name*/) override {
+        return AUDIO_MODULE_HANDLE_NONE;
+    }
     status_t openOutput(audio_module_handle_t /*module*/,
                         audio_io_handle_t* /*output*/,
                         audio_config_t* /*config*/,
                         audio_devices_t* /*devices*/,
                         const String8& /*address*/,
                         uint32_t* /*latencyMs*/,
-                        audio_output_flags_t /*flags*/) override { return 0; }
+                        audio_output_flags_t /*flags*/) override { return NO_INIT; }
     audio_io_handle_t openDuplicateOutput(audio_io_handle_t /*output1*/,
-                                          audio_io_handle_t /*output2*/) override { return 0; }
-    status_t closeOutput(audio_io_handle_t /*output*/) override { return 0; }
-    status_t suspendOutput(audio_io_handle_t /*output*/) override { return 0; }
-    status_t restoreOutput(audio_io_handle_t /*output*/) override { return 0; }
+                                          audio_io_handle_t /*output2*/) override {
+        return AUDIO_IO_HANDLE_NONE;
+    }
+    status_t closeOutput(audio_io_handle_t /*output*/) override { return NO_INIT; }
+    status_t suspendOutput(audio_io_handle_t /*output*/) override { return NO_INIT; }
+    status_t restoreOutput(audio_io_handle_t /*output*/) override { return NO_INIT; }
     status_t openInput(audio_module_handle_t /*module*/,
                        audio_io_handle_t* /*input*/,
                        audio_config_t* /*config*/,
                        audio_devices_t* /*device*/,
                        const String8& /*address*/,
                        audio_source_t /*source*/,
-                       audio_input_flags_t /*flags*/) override { return 0; }
-    status_t closeInput(audio_io_handle_t /*input*/) override { return 0; }
+                       audio_input_flags_t /*flags*/) override { return NO_INIT; }
+    status_t closeInput(audio_io_handle_t /*input*/) override { return NO_INIT; }
     status_t setStreamVolume(audio_stream_type_t /*stream*/,
                              float /*volume*/,
                              audio_io_handle_t /*output*/,
-                             int /*delayMs*/) override { return 0; }
-    status_t invalidateStream(audio_stream_type_t /*stream*/) override { return 0; }
+                             int /*delayMs*/) override { return NO_INIT; }
+    status_t invalidateStream(audio_stream_type_t /*stream*/) override { return NO_INIT; }
     void setParameters(audio_io_handle_t /*ioHandle*/,
                        const String8& /*keyValuePairs*/,
                        int /*delayMs*/) override { }
     String8 getParameters(audio_io_handle_t /*ioHandle*/,
                           const String8& /*keys*/) override { return String8(); }
     status_t startTone(audio_policy_tone_t /*tone*/,
-                       audio_stream_type_t /*stream*/) override { return 0; }
-    status_t stopTone() override { return 0; }
-    status_t setVoiceVolume(float /*volume*/, int /*delayMs*/) override { return 0; }
+                       audio_stream_type_t /*stream*/) override { return NO_INIT; }
+    status_t stopTone() override { return NO_INIT; }
+    status_t setVoiceVolume(float /*volume*/, int /*delayMs*/) override { return NO_INIT; }
     status_t moveEffects(audio_session_t /*session*/,
                          audio_io_handle_t /*srcOutput*/,
-                         audio_io_handle_t /*dstOutput*/) override { return 0; }
+                         audio_io_handle_t /*dstOutput*/) override { return NO_INIT; }
     status_t createAudioPatch(const struct audio_patch* /*patch*/,
                               audio_patch_handle_t* /*handle*/,
-                              int /*delayMs*/) override { return 0; }
+                              int /*delayMs*/) override { return NO_INIT; }
     status_t releaseAudioPatch(audio_patch_handle_t /*handle*/,
-                               int /*delayMs*/) override { return 0; }
+                               int /*delayMs*/) override { return NO_INIT; }
     status_t setAudioPortConfig(const struct audio_port_config* /*config*/,
-                                int /*delayMs*/) override { return 0; }
+                                int /*delayMs*/) override { return NO_INIT; }
     void onAudioPortListUpdate() override { }
     void onAudioPatchListUpdate() override { }
     audio_unique_id_t newAudioUniqueId(audio_unique_id_use_t /*use*/) override { return 0; }
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index de26ab0..a9593b8 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -14,6 +14,9 @@
  * limitations under the License.
  */
 
+#include <memory>
+#include <set>
+
 #include <gtest/gtest.h>
 
 #include "AudioPolicyTestClient.h"
@@ -21,7 +24,7 @@
 
 using namespace android;
 
-TEST(AudioPolicyManager, InitFailure) {
+TEST(AudioPolicyManagerTestInit, Failure) {
     AudioPolicyTestClient client;
     AudioPolicyTestManager manager(&client);
     manager.getConfig().setDefault();
@@ -32,12 +35,13 @@
 }
 
 
-// A client that provides correct module and IO handles for inputs and outputs.
-class AudioPolicyTestClientWithModulesIoHandles : public AudioPolicyTestClient {
+class AudioPolicyManagerTestClient : public AudioPolicyTestClient {
   public:
+    // AudioPolicyClientInterface implementation
     audio_module_handle_t loadHwModule(const char* /*name*/) override {
-        return mNextModule++;
+        return mNextModuleHandle++;
     }
+
     status_t openOutput(audio_module_handle_t module,
                         audio_io_handle_t* output,
                         audio_config_t* /*config*/,
@@ -45,14 +49,15 @@
                         const String8& /*address*/,
                         uint32_t* /*latencyMs*/,
                         audio_output_flags_t /*flags*/) override {
-        if (module >= mNextModule) {
+        if (module >= mNextModuleHandle) {
             ALOGE("%s: Module handle %d has not been allocated yet (next is %d)",
-                    __func__, module, mNextModule);
+                    __func__, module, mNextModuleHandle);
             return BAD_VALUE;
         }
         *output = mNextIoHandle++;
         return NO_ERROR;
     }
+
     status_t openInput(audio_module_handle_t module,
                        audio_io_handle_t* input,
                        audio_config_t* /*config*/,
@@ -60,23 +65,132 @@
                        const String8& /*address*/,
                        audio_source_t /*source*/,
                        audio_input_flags_t /*flags*/) override {
-        if (module >= mNextModule) {
+        if (module >= mNextModuleHandle) {
             ALOGE("%s: Module handle %d has not been allocated yet (next is %d)",
-                    __func__, module, mNextModule);
+                    __func__, module, mNextModuleHandle);
             return BAD_VALUE;
         }
         *input = mNextIoHandle++;
         return NO_ERROR;
     }
+
+    status_t createAudioPatch(const struct audio_patch* /*patch*/,
+                              audio_patch_handle_t* handle,
+                              int /*delayMs*/) override {
+        *handle = mNextPatchHandle++;
+        mActivePatches.insert(*handle);
+        return NO_ERROR;
+    }
+
+    status_t releaseAudioPatch(audio_patch_handle_t handle,
+                               int /*delayMs*/) override {
+        if (mActivePatches.erase(handle) != 1) {
+            if (handle >= mNextPatchHandle) {
+                ALOGE("%s: Patch handle %d has not been allocated yet (next is %d)",
+                        __func__, handle, mNextPatchHandle);
+            } else {
+                ALOGE("%s: Attempt to release patch %d twice", __func__, handle);
+            }
+            return BAD_VALUE;
+        }
+        return NO_ERROR;
+    }
+
+    // Helper methods for tests
+    size_t getActivePatchesCount() const { return mActivePatches.size(); }
+
   private:
-    audio_module_handle_t mNextModule = AUDIO_MODULE_HANDLE_NONE + 1;
+    audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
     audio_io_handle_t mNextIoHandle = AUDIO_IO_HANDLE_NONE + 1;
+    audio_patch_handle_t mNextPatchHandle = AUDIO_PATCH_HANDLE_NONE + 1;
+    std::set<audio_patch_handle_t> mActivePatches;
 };
 
-TEST(AudioPolicyManager, InitSuccess) {
-    AudioPolicyTestClientWithModulesIoHandles client;
-    AudioPolicyTestManager manager(&client);
-    manager.getConfig().setDefault();
-    ASSERT_EQ(NO_ERROR, manager.initialize());
-    ASSERT_EQ(NO_ERROR, manager.initCheck());
+class AudioPolicyManagerTest : public testing::Test {
+  protected:
+    virtual void SetUp();
+    virtual void TearDown();
+
+    std::unique_ptr<AudioPolicyManagerTestClient> mClient;
+    std::unique_ptr<AudioPolicyTestManager> mManager;
+};
+
+void AudioPolicyManagerTest::SetUp() {
+    mClient.reset(new AudioPolicyManagerTestClient);
+    mManager.reset(new AudioPolicyTestManager(mClient.get()));
+    mManager->getConfig().setDefault();
+    ASSERT_EQ(NO_ERROR, mManager->initialize());
+    ASSERT_EQ(NO_ERROR, mManager->initCheck());
 }
+
+void AudioPolicyManagerTest::TearDown() {
+    mManager.reset();
+    mClient.reset();
+}
+
+TEST_F(AudioPolicyManagerTest, InitSuccess) {
+    // SetUp must finish with no assertion failures.
+}
+
+TEST_F(AudioPolicyManagerTest, CreateAudioPatchFailure) {
+    audio_patch patch{};
+    audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
+    const size_t patchCountBefore = mClient->getActivePatchesCount();
+    ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(nullptr, &handle, 0));
+    ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(&patch, nullptr, 0));
+    ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(&patch, &handle, 0));
+    patch.num_sources = AUDIO_PATCH_PORTS_MAX + 1;
+    patch.num_sinks = 1;
+    ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(&patch, &handle, 0));
+    patch.num_sources = 1;
+    patch.num_sinks = AUDIO_PATCH_PORTS_MAX + 1;
+    ASSERT_EQ(BAD_VALUE, mManager->createAudioPatch(&patch, &handle, 0));
+    patch.num_sources = 2;
+    patch.num_sinks = 1;
+    ASSERT_EQ(INVALID_OPERATION, mManager->createAudioPatch(&patch, &handle, 0));
+    patch = {};
+    patch.num_sources = 1;
+    patch.sources[0].role = AUDIO_PORT_ROLE_SINK;
+    patch.num_sinks = 1;
+    patch.sinks[0].role = AUDIO_PORT_ROLE_SINK;
+    ASSERT_EQ(INVALID_OPERATION, mManager->createAudioPatch(&patch, &handle, 0));
+    patch = {};
+    patch.num_sources = 1;
+    patch.sources[0].role = AUDIO_PORT_ROLE_SOURCE;
+    patch.num_sinks = 1;
+    patch.sinks[0].role = AUDIO_PORT_ROLE_SOURCE;
+    ASSERT_EQ(INVALID_OPERATION, mManager->createAudioPatch(&patch, &handle, 0));
+    // Verify that the handle is left unchanged.
+    ASSERT_EQ(AUDIO_PATCH_HANDLE_NONE, handle);
+    ASSERT_EQ(patchCountBefore, mClient->getActivePatchesCount());
+}
+
+TEST_F(AudioPolicyManagerTest, CreateAudioPatchFromMix) {
+    audio_patch patch{};
+    audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
+    uid_t uid = 42;
+    const size_t patchCountBefore = mClient->getActivePatchesCount();
+    patch.num_sources = 1;
+    {
+        auto& src = patch.sources[0];
+        src.role = AUDIO_PORT_ROLE_SOURCE;
+        src.type = AUDIO_PORT_TYPE_MIX;
+        src.id = mManager->getConfig().getAvailableInputDevices()[0]->getId();
+        // Note: these are the parameters of the output device.
+        src.sample_rate = 44100;
+        src.format = AUDIO_FORMAT_PCM_16_BIT;
+        src.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    }
+    patch.num_sinks = 1;
+    {
+        auto& sink = patch.sinks[0];
+        sink.role = AUDIO_PORT_ROLE_SINK;
+        sink.type = AUDIO_PORT_TYPE_DEVICE;
+        sink.id = mManager->getConfig().getDefaultOutputDevice()->getId();
+    }
+    ASSERT_EQ(NO_ERROR, mManager->createAudioPatch(&patch, &handle, uid));
+    ASSERT_NE(AUDIO_PATCH_HANDLE_NONE, handle);
+    ASSERT_EQ(patchCountBefore + 1, mClient->getActivePatchesCount());
+}
+
+// TODO: Add patch creation tests that involve already existing patch
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 1fbba58..aeaca48 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -78,7 +78,8 @@
     android.hardware.camera.provider@2.4 \
     android.hardware.camera.device@1.0 \
     android.hardware.camera.device@3.2 \
-    android.hardware.camera.device@3.3
+    android.hardware.camera.device@3.3 \
+    android.hardware.camera.device@3.4
 
 LOCAL_EXPORT_SHARED_LIBRARY_HEADERS := libbinder libcamera_client libfmq
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 24b0816..5cbc158 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -121,6 +121,34 @@
     return submitRequestList(requestList, streaming, submitInfo);
 }
 
+binder::Status CameraDeviceClient::insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
+        SurfaceMap* outSurfaceMap,
+        Vector<int32_t>* outputStreamIds) {
+    int idx = mStreamMap.indexOfKey(IInterface::asBinder(gbp));
+
+    // Trying to submit request with surface that wasn't created
+    if (idx == NAME_NOT_FOUND) {
+        ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
+                " we have not called createStream on",
+                __FUNCTION__, mCameraIdStr.string());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "Request targets Surface that is not part of current capture session");
+    }
+
+    const StreamSurfaceId& streamSurfaceId = mStreamMap.valueAt(idx);
+    if (outSurfaceMap->find(streamSurfaceId.streamId()) == outSurfaceMap->end()) {
+        (*outSurfaceMap)[streamSurfaceId.streamId()] = std::vector<size_t>();
+        outputStreamIds->push_back(streamSurfaceId.streamId());
+    }
+    (*outSurfaceMap)[streamSurfaceId.streamId()].push_back(streamSurfaceId.surfaceId());
+
+    ALOGV("%s: Camera %s: Appending output stream %d surface %d to request",
+            __FUNCTION__, mCameraIdStr.string(), streamSurfaceId.streamId(),
+            streamSurfaceId.surfaceId());
+
+    return binder::Status::ok();
+}
+
 binder::Status CameraDeviceClient::submitRequestList(
         const std::vector<hardware::camera2::CaptureRequest>& requests,
         bool streaming,
@@ -174,7 +202,7 @@
                    __FUNCTION__, mCameraIdStr.string());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                     "Request settings are empty");
-        } else if (request.mSurfaceList.isEmpty()) {
+        } else if (request.mSurfaceList.isEmpty() && request.mStreamIdxList.size() == 0) {
             ALOGE("%s: Camera %s: Requests must have at least one surface target. "
                     "Rejecting request.", __FUNCTION__, mCameraIdStr.string());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
@@ -193,31 +221,44 @@
          */
         SurfaceMap surfaceMap;
         Vector<int32_t> outputStreamIds;
-        for (const sp<Surface>& surface : request.mSurfaceList) {
-            if (surface == 0) continue;
+        if (request.mSurfaceList.size() > 0) {
+            for (sp<Surface> surface : request.mSurfaceList) {
+                if (surface == 0) continue;
 
-            sp<IGraphicBufferProducer> gbp = surface->getIGraphicBufferProducer();
-            int idx = mStreamMap.indexOfKey(IInterface::asBinder(gbp));
-
-            // Trying to submit request with surface that wasn't created
-            if (idx == NAME_NOT_FOUND) {
-                ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
-                        " we have not called createStream on",
-                        __FUNCTION__, mCameraIdStr.string());
-                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                        "Request targets Surface that is not part of current capture session");
+                sp<IGraphicBufferProducer> gbp = surface->getIGraphicBufferProducer();
+                res = insertGbpLocked(gbp, &surfaceMap, &outputStreamIds);
+                if (!res.isOk()) {
+                    return res;
+                }
             }
+        } else {
+            for (size_t i = 0; i < request.mStreamIdxList.size(); i++) {
+                int streamId = request.mStreamIdxList.itemAt(i);
+                int surfaceIdx = request.mSurfaceIdxList.itemAt(i);
 
-            const StreamSurfaceId& streamSurfaceId = mStreamMap.valueAt(idx);
-            if (surfaceMap.find(streamSurfaceId.streamId()) == surfaceMap.end()) {
-                surfaceMap[streamSurfaceId.streamId()] = std::vector<size_t>();
-                outputStreamIds.push_back(streamSurfaceId.streamId());
+                ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
+                if (index < 0) {
+                    ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
+                            " we have not called createStream on: stream %d",
+                            __FUNCTION__, mCameraIdStr.string(), streamId);
+                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                            "Request targets Surface that is not part of current capture session");
+                }
+
+                const auto& gbps = mConfiguredOutputs.valueAt(index).getGraphicBufferProducers();
+                if ((size_t)surfaceIdx >= gbps.size()) {
+                    ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
+                            " we have not called createStream on: stream %d, surfaceIdx %d",
+                            __FUNCTION__, mCameraIdStr.string(), streamId, surfaceIdx);
+                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                            "Request targets Surface has invalid surface index");
+                }
+
+                res = insertGbpLocked(gbps[surfaceIdx], &surfaceMap, &outputStreamIds);
+                if (!res.isOk()) {
+                    return res;
+                }
             }
-            surfaceMap[streamSurfaceId.streamId()].push_back(streamSurfaceId.surfaceId());
-
-            ALOGV("%s: Camera %s: Appending output stream %d surface %d to request",
-                    __FUNCTION__, mCameraIdStr.string(), streamSurfaceId.streamId(),
-                    streamSurfaceId.surfaceId());
         }
 
         metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0],
@@ -318,7 +359,8 @@
     return binder::Status::ok();
 }
 
-binder::Status CameraDeviceClient::endConfigure(int operatingMode) {
+binder::Status CameraDeviceClient::endConfigure(int operatingMode,
+        const hardware::camera2::impl::CameraMetadataNative& sessionParams) {
     ATRACE_CALL();
     ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
             __FUNCTION__, mInputStream.configured ? 1 : 0,
@@ -364,7 +406,7 @@
         }
     }
 
-    status_t err = mDevice->configureStreams(operatingMode);
+    status_t err = mDevice->configureStreams(sessionParams, operatingMode);
     if (err == BAD_VALUE) {
         String8 msg = String8::format("Camera %s: Unsupported set of inputs/outputs provided",
                 mCameraIdStr.string());
@@ -439,6 +481,8 @@
                 mStreamMap.removeItem(surface);
             }
 
+            mConfiguredOutputs.removeItem(streamId);
+
             if (dIndex != NAME_NOT_FOUND) {
                 mDeferredStreams.removeItemsAt(dIndex);
             }
@@ -550,6 +594,7 @@
             i++;
         }
 
+        mConfiguredOutputs.add(streamId, outputConfiguration);
         mStreamInfoMap[streamId] = streamInfo;
 
         ALOGV("%s: Camera %s: Successfully created a new stream ID %d for output surface"
@@ -842,6 +887,8 @@
                     StreamSurfaceId(streamId, outputMap.valueAt(i)));
         }
 
+        mConfiguredOutputs.replaceValueFor(streamId, outputConfiguration);
+
         ALOGV("%s: Camera %s: Successful stream ID %d update",
                   __FUNCTION__, mCameraIdStr.string(), streamId);
     }
@@ -1412,6 +1459,7 @@
             mDeferredStreams.removeItemsAt(deferredStreamIndex);
         }
         mStreamInfoMap[streamId].finalized = true;
+        mConfiguredOutputs.replaceValueFor(streamId, outputConfiguration);
     } else if (err == NO_INIT) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "Camera %s: Deferred surface is invalid: %s (%d)",
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index e1a11db..4086c72 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -85,7 +85,8 @@
 
     virtual binder::Status beginConfigure() override;
 
-    virtual binder::Status endConfigure(int operatingMode) override;
+    virtual binder::Status endConfigure(int operatingMode,
+            const hardware::camera2::impl::CameraMetadataNative& sessionParams) override;
 
     // Returns -EBUSY if device is not idle or in error state
     virtual binder::Status deleteStream(int streamId) override;
@@ -255,9 +256,18 @@
     binder::Status createSurfaceFromGbp(OutputStreamInfo& streamInfo, bool isStreamInfoValid,
             sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp);
 
+
+    // Utility method to insert the surface into SurfaceMap
+    binder::Status insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
+            /*out*/SurfaceMap* surfaceMap,
+            /*out*/Vector<int32_t>* streamIds);
+
     // IGraphicsBufferProducer binder -> Stream ID + Surface ID for output streams
     KeyedVector<sp<IBinder>, StreamSurfaceId> mStreamMap;
 
+    // Stream ID -> OutputConfiguration. Used for looking up Surface by stream/surface index
+    KeyedVector<int32_t, hardware::camera2::params::OutputConfiguration> mConfiguredOutputs;
+
     struct InputStreamConfiguration {
         bool configured;
         int32_t width;
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index f1f96c3..3fd6921 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -212,7 +212,8 @@
      * - BAD_VALUE if the set of streams was invalid (e.g. fmts or sizes)
      * - INVALID_OPERATION if the device was in the wrong state
      */
-    virtual status_t configureStreams(int operatingMode = 0) = 0;
+    virtual status_t configureStreams(const CameraMetadata& sessionParams,
+            int operatingMode = 0) = 0;
 
     // get the buffer producer of the input stream
     virtual status_t getInputBufferProducer(
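With the base-class change above, every device implementation now receives the session parameters together with the operating mode, while callers that do not care about the mode can still rely on the defaulted value. A minimal sketch of that interface shape follows; DeviceBase, FakeDevice, and the Metadata alias are hypothetical stand-ins, not the framework classes.

#include <cstdint>
#include <map>

using Metadata = std::map<uint32_t, int64_t>;   // stand-in for CameraMetadata

// Stand-in for the abstract device interface.
class DeviceBase {
  public:
    virtual ~DeviceBase() = default;
    // Session parameters travel with the configure call; the mode keeps its default.
    virtual int configureStreams(const Metadata& sessionParams,
                                 int operatingMode = 0) = 0;
};

class FakeDevice : public DeviceBase {
  public:
    int configureStreams(const Metadata& sessionParams,
                         int operatingMode = 0) override {
        if (operatingMode < 0) {
            return -1;           // stand-in for BAD_VALUE
        }
        // A real device would forward sessionParams to the HAL here.
        (void) sessionParams;
        return 0;                // stand-in for OK
    }
};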
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index c0db8e7..d99fc1d 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -55,6 +55,8 @@
 #include "device3/Camera3SharedOutputStream.h"
 #include "CameraService.h"
 
+#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
+
 using namespace android::camera3;
 using namespace android::hardware::camera;
 using namespace android::hardware::camera::device::V3_2;
@@ -1102,7 +1104,7 @@
     if (mStatus == STATUS_UNCONFIGURED || mNeedConfig) {
         // This point should only be reached via API1 (API2 must explicitly call configureStreams)
         // so unilaterally select normal operating mode.
-        res = configureStreamsLocked(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE);
+        res = configureStreamsLocked(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE, mSessionParams);
        // Stream configuration failed. Client might try other configurations.
         if (res != OK) {
             CLOGE("Can't set up streams: %s (%d)", strerror(-res), res);
@@ -1205,8 +1207,8 @@
     // Continue captures if active at start
     if (wasActive) {
         ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__);
-        // Reuse current operating mode for new stream config
-        res = configureStreamsLocked(mOperatingMode);
+        // Reuse current operating mode and session parameters for new stream config
+        res = configureStreamsLocked(mOperatingMode, mSessionParams);
         if (res != OK) {
             ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)",
                     __FUNCTION__, mNextStreamId, strerror(-res), res);
@@ -1360,8 +1362,8 @@
     // Continue captures if active at start
     if (wasActive) {
         ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__);
-        // Reuse current operating mode for new stream config
-        res = configureStreamsLocked(mOperatingMode);
+        // Reuse current operating mode and session parameters for new stream config
+        res = configureStreamsLocked(mOperatingMode, mSessionParams);
         if (res != OK) {
             CLOGE("Can't reconfigure device for new stream %d: %s (%d)",
                     mNextStreamId, strerror(-res), res);
@@ -1499,14 +1501,29 @@
     return res;
 }
 
-status_t Camera3Device::configureStreams(int operatingMode) {
+status_t Camera3Device::configureStreams(const CameraMetadata& sessionParams, int operatingMode) {
     ATRACE_CALL();
     ALOGV("%s: E", __FUNCTION__);
 
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
-    return configureStreamsLocked(operatingMode);
+    // Filter out incoming session parameters not advertised as available session keys
+    const CameraMetadata params(sessionParams);
+    CameraMetadata filteredParams;
+    camera_metadata_entry_t availableSessionKeys = mDeviceInfo.find(
+            ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
+    if (availableSessionKeys.count > 0) {
+        for (size_t i = 0; i < availableSessionKeys.count; i++) {
+            camera_metadata_ro_entry entry = params.find(
+                    availableSessionKeys.data.i32[i]);
+            if (entry.count > 0) {
+                filteredParams.update(entry);
+            }
+        }
+    }
+
+    return configureStreamsLocked(operatingMode, filteredParams);
 }
 
 status_t Camera3Device::getInputBufferProducer(
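The filtering step above forwards only those parameters whose tags appear in ANDROID_REQUEST_AVAILABLE_SESSION_KEYS; everything else is dropped before the locked call. The sketch below restates the same idea stand-alone, using a plain map and set instead of CameraMetadata; filterSessionParams is a hypothetical helper, not framework API.

#include <cstdint>
#include <map>
#include <set>

// Stand-in for CameraMetadata: tag -> value.
using Metadata = std::map<uint32_t, int64_t>;

// Keep only the entries whose tags the device advertises as session keys.
Metadata filterSessionParams(const Metadata& incoming,
                             const std::set<uint32_t>& availableSessionKeys) {
    Metadata filtered;
    for (uint32_t tag : availableSessionKeys) {
        auto entry = incoming.find(tag);
        if (entry != incoming.end()) {
            filtered.insert(*entry);
        }
    }
    return filtered;   // anything not advertised is silently dropped
}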
@@ -2188,7 +2205,8 @@
     mNeedConfig = true;
 }
 
-status_t Camera3Device::configureStreamsLocked(int operatingMode) {
+status_t Camera3Device::configureStreamsLocked(int operatingMode,
+        const CameraMetadata& sessionParams) {
     ATRACE_CALL();
     status_t res;
 
@@ -2272,7 +2290,9 @@
     // Do the HAL configuration; will potentially touch stream
     // max_buffers, usage, priv fields.
 
-    res = mInterface->configureStreams(&config);
+    const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
+    res = mInterface->configureStreams(sessionBuffer, &config);
+    sessionParams.unlock(sessionBuffer);
 
     if (res == BAD_VALUE) {
         // HAL rejected this set of streams as unsupported, clean up config
@@ -2337,6 +2357,14 @@
     }
 
     // Update device state
+    const camera_metadata_t *newSessionParams = sessionParams.getAndLock();
+    const camera_metadata_t *currentSessionParams = mSessionParams.getAndLock();
+    bool updateSessionParams = (newSessionParams != currentSessionParams);
+    sessionParams.unlock(newSessionParams);
+    mSessionParams.unlock(currentSessionParams);
+    if (updateSessionParams) {
+        mSessionParams = sessionParams;
+    }
 
     mNeedConfig = false;
 
@@ -3224,17 +3252,18 @@
     return res;
 }
 
-status_t Camera3Device::HalInterface::configureStreams(camera3_stream_configuration *config) {
+status_t Camera3Device::HalInterface::configureStreams(const camera_metadata_t *sessionParams,
+        camera3_stream_configuration *config) {
     ATRACE_NAME("CameraHal::configureStreams");
     if (!valid()) return INVALID_OPERATION;
     status_t res = OK;
 
     // Convert stream config to HIDL
     std::set<int> activeStreams;
-    StreamConfiguration requestedConfiguration;
-    requestedConfiguration.streams.resize(config->num_streams);
+    device::V3_4::StreamConfiguration requestedConfiguration;
+    requestedConfiguration.v3_2.streams.resize(config->num_streams);
     for (size_t i = 0; i < config->num_streams; i++) {
-        Stream &dst = requestedConfiguration.streams[i];
+        Stream &dst = requestedConfiguration.v3_2.streams[i];
         camera3_stream_t *src = config->streams[i];
 
         Camera3Stream* cam3stream = Camera3Stream::cast(src);
@@ -3281,29 +3310,50 @@
 
     res = mapToStreamConfigurationMode(
             (camera3_stream_configuration_mode_t) config->operation_mode,
-            /*out*/ &requestedConfiguration.operationMode);
+            /*out*/ &requestedConfiguration.v3_2.operationMode);
     if (res != OK) {
         return res;
     }
 
+    requestedConfiguration.sessionParams.setToExternal(
+            reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
+            get_camera_metadata_size(sessionParams));
+
     // Invoke configureStreams
 
     device::V3_3::HalStreamConfiguration finalConfiguration;
     common::V1_0::Status status;
 
-    // See if we have v3.3 HAL
+    // See if we have v3.4 or v3.3 HAL
+    sp<device::V3_4::ICameraDeviceSession> hidlSession_3_4;
     sp<device::V3_3::ICameraDeviceSession> hidlSession_3_3;
-    auto castResult = device::V3_3::ICameraDeviceSession::castFrom(mHidlSession);
-    if (castResult.isOk()) {
-        hidlSession_3_3 = castResult;
+    auto castResult_3_4 = device::V3_4::ICameraDeviceSession::castFrom(mHidlSession);
+    if (castResult_3_4.isOk()) {
+        hidlSession_3_4 = castResult_3_4;
     } else {
-        ALOGE("%s: Transaction error when casting ICameraDeviceSession: %s", __FUNCTION__,
-                castResult.description().c_str());
+        auto castResult_3_3 = device::V3_3::ICameraDeviceSession::castFrom(mHidlSession);
+        if (castResult_3_3.isOk()) {
+            hidlSession_3_3 = castResult_3_3;
+        }
     }
-    if (hidlSession_3_3 != nullptr) {
+
+    if (hidlSession_3_4 != nullptr) {
+        // We do; use v3.4 for the call
+        ALOGV("%s: v3.4 device found", __FUNCTION__);
+        auto err = hidlSession_3_4->configureStreams_3_4(requestedConfiguration,
+            [&status, &finalConfiguration]
+            (common::V1_0::Status s, const device::V3_3::HalStreamConfiguration& halConfiguration) {
+                finalConfiguration = halConfiguration;
+                status = s;
+            });
+        if (!err.isOk()) {
+            ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str());
+            return DEAD_OBJECT;
+        }
+    } else if (hidlSession_3_3 != nullptr) {
         // We do; use v3.3 for the call
         ALOGV("%s: v3.3 device found", __FUNCTION__);
-        auto err = hidlSession_3_3->configureStreams_3_3(requestedConfiguration,
+        auto err = hidlSession_3_3->configureStreams_3_3(requestedConfiguration.v3_2,
             [&status, &finalConfiguration]
             (common::V1_0::Status s, const device::V3_3::HalStreamConfiguration& halConfiguration) {
                 finalConfiguration = halConfiguration;
@@ -3317,7 +3367,7 @@
         // We don't; use v3.2 call and construct a v3.3 HalStreamConfiguration
         ALOGV("%s: v3.2 device found", __FUNCTION__);
         HalStreamConfiguration finalConfiguration_3_2;
-        auto err = mHidlSession->configureStreams(requestedConfiguration,
+        auto err = mHidlSession->configureStreams(requestedConfiguration.v3_2,
                 [&status, &finalConfiguration_3_2]
                 (common::V1_0::Status s, const HalStreamConfiguration& halConfiguration) {
                     finalConfiguration_3_2 = halConfiguration;
@@ -3331,7 +3381,7 @@
         for (size_t i = 0; i < finalConfiguration_3_2.streams.size(); i++) {
             finalConfiguration.streams[i].v3_2 = finalConfiguration_3_2.streams[i];
             finalConfiguration.streams[i].overrideDataSpace =
-                    requestedConfiguration.streams[i].dataSpace;
+                    requestedConfiguration.v3_2.streams[i].dataSpace;
         }
     }
 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index e9466ab..cc7eb35 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -138,7 +138,8 @@
 
     status_t deleteStream(int id) override;
 
-    status_t configureStreams(int operatingMode =
+    status_t configureStreams(const CameraMetadata& sessionParams,
+            int operatingMode =
             static_cast<int>(hardware::camera::device::V3_2::StreamConfigurationMode::NORMAL_MODE))
             override;
     status_t getInputBufferProducer(
@@ -236,6 +237,9 @@
 
     // Current stream configuration mode;
     int                        mOperatingMode;
+    // Current session-wide parameters
+    hardware::camera2::impl::CameraMetadataNative mSessionParams;
+
     // Constant to use for no set operating mode
     static const int           NO_MODE = -1;
 
@@ -272,7 +276,8 @@
         // Caller takes ownership of requestTemplate
         status_t constructDefaultRequestSettings(camera3_request_template_t templateId,
                 /*out*/ camera_metadata_t **requestTemplate);
-        status_t configureStreams(/*inout*/ camera3_stream_configuration *config);
+        status_t configureStreams(const camera_metadata_t *sessionParams,
+                /*inout*/ camera3_stream_configuration *config);
         status_t processCaptureRequest(camera3_capture_request_t *request);
         status_t processBatchCaptureRequests(
                 std::vector<camera3_capture_request_t*>& requests,
@@ -550,7 +555,8 @@
      * Take the currently-defined set of streams and configure the HAL to use
      * them. This is a long-running operation (may be several hundred ms).
      */
-    status_t           configureStreamsLocked(int operatingMode);
+    status_t           configureStreamsLocked(int operatingMode,
+            const CameraMetadata& sessionParams);
 
     /**
      * Cancel stream configuration that did not finish successfully.
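The new mSessionParams member and the two internal reconfiguration call sites earlier in the patch follow one pattern: remember the session parameters from the last explicit configure call and replay them whenever the device reconfigures streams on its own (for example after adding or deleting a stream while active). A small sketch of that caching behavior, with a hypothetical FakeDevice and a plain std::map in place of CameraMetadata:

#include <cstdint>
#include <map>

using SessionParams = std::map<uint32_t, int64_t>;   // stand-in for CameraMetadata

class FakeDevice {
  public:
    // Explicit configure call from the client: remember mode and session parameters.
    void configureStreams(const SessionParams& sessionParams, int operatingMode) {
        mSessionParams = sessionParams;
        mOperatingMode = operatingMode;
        configureStreamsLocked(mOperatingMode, mSessionParams);
    }
    // Internal reconfiguration: replay the cached mode and parameters
    // rather than silently dropping them.
    void reconfigureAfterStreamChange() {
        configureStreamsLocked(mOperatingMode, mSessionParams);
    }
  private:
    void configureStreamsLocked(int /*operatingMode*/,
                                const SessionParams& /*sessionParams*/) {
        // ... build the stream configuration and call into the HAL ...
    }
    SessionParams mSessionParams;
    int mOperatingMode = 0;
};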