Merge "Camera: Support querying session config with parameters" into main
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.h b/media/libaaudio/src/binding/AAudioBinderClient.h
index 8faf6e8..66d3295 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.h
+++ b/media/libaaudio/src/binding/AAudioBinderClient.h
@@ -203,12 +203,12 @@
 
 private:
     android::Mutex                          mServiceLock;
-    std::shared_ptr<AAudioServiceInterface> mAdapter;
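+    // PT_GUARDED_BY: the AAudioServiceInterface that mAdapter points to is
+    // protected by mServiceLock; the pointer itself is not constrained.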
+    std::shared_ptr<AAudioServiceInterface> mAdapter PT_GUARDED_BY(mServiceLock);
     android::sp<AAudioClient>               mAAudioClient;
 
-    std::shared_ptr<AAudioServiceInterface> getAAudioService();
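+    // EXCLUDES: must not be called with mServiceLock already held.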
+    std::shared_ptr<AAudioServiceInterface> getAAudioService() EXCLUDES(mServiceLock);
 
-    void dropAAudioService();
+    void dropAAudioService() EXCLUDES(mServiceLock);
 
 };
 
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index 01dd05a..2c23e1d 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -117,8 +117,6 @@
     bool                    mFreeRunning{false};
     android::fifo_counter_t mDataReadCounter{0}; // only used if free-running
     android::fifo_counter_t mDataWriteCounter{0}; // only used if free-running
-
-    std::mutex mDataQueueLock;
 };
 
 } // namespace aaudio
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index f2f5cac..49a63c4 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -172,7 +172,7 @@
         return createThread_l(periodNanoseconds, threadProc, threadArg);
     }
 
-    aaudio_result_t joinThread(void **returnArg);
+    aaudio_result_t joinThread(void **returnArg) EXCLUDES(mStreamLock);
 
     virtual aaudio_result_t registerThread() {
         return AAUDIO_OK;
@@ -424,7 +424,7 @@
     }
 
     // This is used by the AudioManager to duck and mute the stream when changing audio focus.
-    void setDuckAndMuteVolume(float duckAndMuteVolume);
+    void setDuckAndMuteVolume(float duckAndMuteVolume) EXCLUDES(mStreamLock);
 
     float getDuckAndMuteVolume() const {
         return mDuckAndMuteVolume;
@@ -459,11 +459,11 @@
         mPlayerBase->unregisterWithAudioManager();
     }
 
-    aaudio_result_t systemStart();
+    aaudio_result_t systemStart() EXCLUDES(mStreamLock);
 
-    aaudio_result_t systemPause();
+    aaudio_result_t systemPause() EXCLUDES(mStreamLock);
 
-    aaudio_result_t safeFlush();
+    aaudio_result_t safeFlush() EXCLUDES(mStreamLock);
 
     /**
      * This is called when an app calls AAudioStream_requestStop();
@@ -474,14 +474,14 @@
     /**
      * This is called internally when an app callback returns AAUDIO_CALLBACK_RESULT_STOP.
      */
-    aaudio_result_t systemStopInternal();
+    aaudio_result_t systemStopInternal() EXCLUDES(mStreamLock);
 
     /**
      * Safely RELEASE a stream after taking mStreamLock and checking
      * to make sure we are not being called from a callback.
      * @return AAUDIO_OK or a negative error
      */
-    aaudio_result_t safeRelease();
+    aaudio_result_t safeRelease() EXCLUDES(mStreamLock);
 
     /**
      * Safely RELEASE and CLOSE a stream after taking mStreamLock and checking
@@ -490,7 +490,7 @@
      */
     aaudio_result_t safeReleaseClose();
 
-    aaudio_result_t safeReleaseCloseInternal();
+    aaudio_result_t safeReleaseCloseInternal() EXCLUDES(mStreamLock);
 
 protected:
 
diff --git a/media/libaudiofoundation/include/media/AudioPort.h b/media/libaudiofoundation/include/media/AudioPort.h
index 77e58ed..5786f7f 100644
--- a/media/libaudiofoundation/include/media/AudioPort.h
+++ b/media/libaudiofoundation/include/media/AudioPort.h
@@ -48,7 +48,10 @@
     audio_port_role_t getRole() const { return mRole; }
 
     virtual void setFlags(uint32_t flags);
-    uint32_t getFlags() const { return useInputChannelMask() ? mFlags.input : mFlags.output; }
+    uint32_t getFlags() const {
+        return useInputChannelMask() ? static_cast<uint32_t>(mFlags.input)
+                                     : static_cast<uint32_t>(mFlags.output);
+    }
 
     void setGains(const AudioGains &gains) { mGains = gains; }
     const AudioGains &getGains() const { return mGains; }
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index f5fa16e..ec72ee3 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -143,6 +143,7 @@
           .setControlAwbRegions({kDefaultEmptyControlRegion})
           .setControlAeCompensationRange(0, 1)
           .setControlAeCompensationStep(camera_metadata_rational_t{0, 1})
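+          // Advertise a fixed zoom ratio of 1.0 (no zoom support).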
+          .setControlZoomRatioRange(/*min=*/1.0, /*max=*/1.0)
           .setMaxJpegSize(kMaxJpegSize)
           .setAvailableRequestKeys({ANDROID_CONTROL_AF_MODE})
           .setAvailableResultKeys({ANDROID_CONTROL_AF_MODE})
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 8a2db1c..8621160 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -24,6 +24,7 @@
 #include <mutex>
 #include <thread>
 
+#include "GLES/gl.h"
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/BufferStatus.h"
@@ -271,9 +272,9 @@
     }
 
     auto status = streamConfig->format == PixelFormat::BLOB
-                      ? renderIntoBlobStreamBuffer(
-                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
-                            streamConfig->bufferSize, reqBuffer.getFence())
+                      ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
+                                                   reqBuffer.getBufferId(),
+                                                   reqBuffer.getFence())
                       : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                     reqBuffer.getBufferId(),
                                                     reqBuffer.getFence());
@@ -354,17 +355,21 @@
 }
 
 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
-    const int streamId, const int bufferId, const size_t bufferSize,
-    sp<Fence> fence) {
+    const int streamId, const int bufferId, sp<Fence> fence) {
   ALOGV("%s", __func__);
-  sp<GraphicBuffer> gBuffer = mEglSurfaceTexture->getCurrentBuffer();
-  if (gBuffer == nullptr) {
-    // Most probably nothing was yet written to input surface if we reached this.
-    ALOGE("%s: Cannot fetch most recent buffer from SurfaceTexture", __func__);
-    return cameraStatus(Status::INTERNAL_ERROR);
-  }
   std::shared_ptr<AHardwareBuffer> hwBuffer =
       mSessionContext.fetchHardwareBuffer(streamId, bufferId);
+  if (hwBuffer == nullptr) {
+    ALOGE("%s: Failed to fetch hardware buffer %d for streamId %d", __func__,
+          bufferId, streamId);
+    return cameraStatus(Status::INTERNAL_ERROR);
+  }
+
+  std::optional<Stream> stream = mSessionContext.getStreamConfig(streamId);
+  if (!stream.has_value()) {
+    ALOGE("%s, failed to fetch information about stream %d", __func__, streamId);
+    return cameraStatus(Status::INTERNAL_ERROR);
+  }
 
   AHardwareBuffer_Planes planes_info;
 
@@ -377,27 +382,36 @@
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
-  android_ycbcr ycbcr;
-  status_t status =
-      gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
-  ALOGV("Locked buffers");
-  if (status != NO_ERROR) {
-    AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
-    ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
-    return cameraStatus(Status::INTERNAL_ERROR);
-  }
+  sp<GraphicBuffer> gBuffer = mEglSurfaceTexture->getCurrentBuffer();
+  bool compressionSuccess = true;
+  if (gBuffer != nullptr) {
+    android_ycbcr ycbcr;
+    status_t status =
+        gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
+    ALOGV("Locked buffers");
+    if (status != NO_ERROR) {
+      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
+      ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
+      return cameraStatus(Status::INTERNAL_ERROR);
+    }
 
-  bool success = compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
-                              bufferSize, planes_info.planes[0].data);
+    compressionSuccess =
+        compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
+                     stream->bufferSize, planes_info.planes[0].data);
 
-  status_t res = gBuffer->unlock();
-  if (res != NO_ERROR) {
-    ALOGE("Failed to unlock graphic buffer: %d", res);
+    status_t res = gBuffer->unlock();
+    if (res != NO_ERROR) {
+      ALOGE("Failed to unlock graphic buffer: %d", res);
+    }
+  } else {
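+    // No frame has been written to the input surface yet; fall back to
+    // compressing an all-black image of the stream's configured size.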
+    compressionSuccess =
+        compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
+                          planes_info.planes[0].data);
   }
   AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
   ALOGV("Unlocked buffers");
-  return success ? ndk::ScopedAStatus::ok()
-                 : cameraStatus(Status::INTERNAL_ERROR);
+  return compressionSuccess ? ndk::ScopedAStatus::ok()
+                            : cameraStatus(Status::INTERNAL_ERROR);
 }
 
 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
@@ -435,7 +449,15 @@
   mEglDisplayContext->makeCurrent();
   framebuffer->beforeDraw();
 
-  mEglTextureProgram->draw(mEglSurfaceTexture->updateTexture());
+  if (mEglSurfaceTexture->getCurrentBuffer() == nullptr) {
+    // If there's no current buffer, nothing has been written to the surface
+    // yet and the texture is not initialized. Render the framebuffer black
+    // instead of rendering the texture.
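+    // Clearing to (0.0, 0.5, 0.5) corresponds to black in the YCbCr output
+    // buffer (zero luma, neutral chroma).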
+    glClearColor(0.0f, 0.5f, 0.5f, 0.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+  } else {
+    mEglTextureProgram->draw(mEglSurfaceTexture->updateTexture());
+  }
   framebuffer->afterDraw();
 
   const std::chrono::nanoseconds after =
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index 30de7c2..a4374e6 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -126,7 +126,6 @@
   // Always called on render thread.
   ndk::ScopedAStatus renderIntoBlobStreamBuffer(const int streamId,
                                                 const int bufferId,
-                                                const size_t bufferSize,
                                                 sp<Fence> fence = nullptr);
 
   // Render current image to the YCbCr buffer.
diff --git a/services/camera/virtualcamera/tests/EglUtilTest.cc b/services/camera/virtualcamera/tests/EglUtilTest.cc
index d387ebf..d0b7218 100644
--- a/services/camera/virtualcamera/tests/EglUtilTest.cc
+++ b/services/camera/virtualcamera/tests/EglUtilTest.cc
@@ -31,28 +31,12 @@
 namespace virtualcamera {
 namespace {
 
-using ::testing::Eq;
-using ::testing::NotNull;
+using ::testing::IsNull;
 
 constexpr int kWidth = 64;
 constexpr int kHeight = 64;
 constexpr char kGlExtYuvTarget[] = "GL_EXT_YUV_target";
 
-uint8_t getY(const android_ycbcr& ycbcr, const int x, const int y) {
-    uint8_t* yPtr = reinterpret_cast<uint8_t*>(ycbcr.y);
-    return *(yPtr + ycbcr.ystride * y + x);
-}
-
-uint8_t getCb(const android_ycbcr& ycbcr, const int x, const int y) {
-    uint8_t* cbPtr = reinterpret_cast<uint8_t*>(ycbcr.cb);
-    return *(cbPtr + ycbcr.cstride * (y / 2) + (x / 2) * ycbcr.chroma_step);
-}
-
-uint8_t getCr(const android_ycbcr& ycbcr, const int x, const int y) {
-    uint8_t* crPtr = reinterpret_cast<uint8_t*>(ycbcr.cr);
-    return *(crPtr + ycbcr.cstride * (y / 2) + (x / 2) * ycbcr.chroma_step);
-}
-
 TEST(EglDisplayContextTest, SuccessfulInitialization) {
   EglDisplayContext displayContext;
 
@@ -88,7 +72,7 @@
   EXPECT_TRUE(eglTextureProgram.isInitialized());
 }
 
-TEST_F(EglTest, EglSurfaceTextureBlackAfterInit) {
+TEST_F(EglTest, EglSurfaceTextureCurrentBufferNullAfterInit) {
   if (!isGlExtensionSupported(kGlExtYuvTarget)) {
       GTEST_SKIP() << "Skipping test because of missing required GL extension " << kGlExtYuvTarget;
   }
@@ -97,24 +81,7 @@
   surfaceTexture.updateTexture();
   sp<GraphicBuffer> buffer = surfaceTexture.getCurrentBuffer();
 
-  ASSERT_THAT(buffer, NotNull());
-  const int width = buffer->getWidth();
-  const int height = buffer->getHeight();
-  ASSERT_THAT(width, Eq(kWidth));
-  ASSERT_THAT(height, Eq(kHeight));
-
-  android_ycbcr ycbcr;
-  status_t ret = buffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
-  ASSERT_THAT(ret, Eq(NO_ERROR));
-  for (int i = 0; i < width; ++i) {
-      for (int j = 0; j < height; ++j) {
-          EXPECT_THAT(getY(ycbcr, i, j), Eq(0x00));
-          EXPECT_THAT(getCb(ycbcr, i, j), Eq(0x7f));
-          EXPECT_THAT(getCr(ycbcr, i, j), Eq(0x7f));
-      }
-  }
-
-  buffer->unlock();
+  EXPECT_THAT(buffer, IsNull());
 }
 
 }  // namespace
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index 266d65a..5b479c0 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -31,26 +31,6 @@
 namespace companion {
 namespace virtualcamera {
 
-namespace {
-
-void submitBlackBufferYCbCr420(Surface& surface) {
-    ANativeWindow_Buffer buffer;
-
-    int ret = surface.lock(&buffer, nullptr);
-    if (ret != NO_ERROR) {
-        ALOGE("%s: Cannot lock output surface: %d", __func__, ret);
-        return;
-    }
-    uint8_t* data = reinterpret_cast<uint8_t*>(buffer.bits);
-    const int yPixNr = buffer.width * buffer.height;
-    const int uvPixNr = (buffer.width / 2) * (buffer.height / 2);
-    memset(data, 0x00, yPixNr);
-    memset(data + yPixNr, 0x7f, 2 * uvPixNr);
-    surface.unlockAndPost();
-}
-
-}  // namespace
-
 EglSurfaceTexture::EglSurfaceTexture(const uint32_t width, const uint32_t height)
     : mWidth(width), mHeight(height) {
   glGenTextures(1, &mTextureId);
@@ -67,14 +47,6 @@
   mGlConsumer->setDefaultBufferFormat(AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420);
 
   mSurface = sp<Surface>::make(mBufferProducer);
-  // Submit black buffer to the surface to make sure there's input buffer
-  // to process in case capture request comes before client writes something
-  // to the surface.
-  //
-  // Note that if the client does write something before capture request is
-  // processed (& updateTexture is called), this black buffer will be
-  // skipped (and recycled).
-  submitBlackBufferYCbCr420(*mSurface);
 }
 
 EglSurfaceTexture::~EglSurfaceTexture() {
diff --git a/services/camera/virtualcamera/util/JpegUtil.cc b/services/camera/virtualcamera/util/JpegUtil.cc
index 6f10376..2b19c13 100644
--- a/services/camera/virtualcamera/util/JpegUtil.cc
+++ b/services/camera/virtualcamera/util/JpegUtil.cc
@@ -20,6 +20,7 @@
 #include <cstddef>
 #include <cstdint>
 #include <memory>
+#include <vector>
 
 #include "android/hardware_buffer.h"
 #include "jpeglib.h"
@@ -37,10 +38,9 @@
 
 class LibJpegContext {
  public:
-  LibJpegContext(int width, int height, const android_ycbcr& ycbcr,
-                 const size_t outBufferSize, void* outBuffer)
-      : mYCbCr(ycbcr),
-        mWidth(width),
+  LibJpegContext(int width, int height, const size_t outBufferSize,
+                 void* outBuffer)
+      : mWidth(width),
         mHeight(height),
         mDstBufferSize(outBufferSize),
         mDstBuffer(outBuffer) {
@@ -94,15 +94,15 @@
     mCompressStruct.comp_info[2].v_samp_factor = 1;
   }
 
-  bool compress() {
+  bool compress(const android_ycbcr& ycbcr) {
     // Prepare arrays of pointers to scanlines of each plane.
     std::vector<JSAMPROW> yLines(mHeight);
     std::vector<JSAMPROW> cbLines(mHeight / 2);
     std::vector<JSAMPROW> crLines(mHeight / 2);
 
-    uint8_t* y = static_cast<uint8_t*>(mYCbCr.y);
-    uint8_t* cb = static_cast<uint8_t*>(mYCbCr.cb);
-    uint8_t* cr = static_cast<uint8_t*>(mYCbCr.cr);
+    uint8_t* y = static_cast<uint8_t*>(ycbcr.y);
+    uint8_t* cb = static_cast<uint8_t*>(ycbcr.cb);
+    uint8_t* cr = static_cast<uint8_t*>(ycbcr.cr);
 
     // Since UV samples might be interleaved (semiplanar) we need to copy
     // them to separate planes, since libjpeg doesn't directly
@@ -115,20 +115,59 @@
     for (int i = 0; i < c_samples; ++i) {
       cb_plane[i] = *cb;
       cr_plane[i] = *cr;
-      cb += mYCbCr.chroma_step;
-      cr += mYCbCr.chroma_step;
+      cb += ycbcr.chroma_step;
+      cr += ycbcr.chroma_step;
     }
 
     // Collect pointers to individual scanline of each plane.
     for (int i = 0; i < mHeight; ++i) {
-      yLines[i] = y + i * mYCbCr.ystride;
+      yLines[i] = y + i * ycbcr.ystride;
     }
     for (int i = 0; i < (mHeight / 2); ++i) {
       cbLines[i] = cb_plane.data() + i * (mWidth / 2);
       crLines[i] = cr_plane.data() + i * (mWidth / 2);
     }
 
-    // Perform actual compression.
+    return compress(yLines, cbLines, crLines);
+  }
+
+  bool compressBlackImage() {
+    // We only really need to prepare one scanline for Y and one shared scanline
+    // for Cb & Cr.
+    std::vector<uint8_t> yLine(mWidth, 0);
+    std::vector<uint8_t> chromaLine(mWidth / 2, 0xff / 2);
+
+    std::vector<JSAMPROW> yLines(mHeight, yLine.data());
+    std::vector<JSAMPROW> cLines(mHeight / 2, chromaLine.data());
+
+    return compress(yLines, cLines, cLines);
+  }
+
+ private:
+  void setSuccess(const boolean success) {
+    mSuccess = success;
+  }
+
+  void initDestination() {
+    mDestinationMgr.next_output_byte = reinterpret_cast<JOCTET*>(mDstBuffer);
+    mDestinationMgr.free_in_buffer = mDstBufferSize;
+    ALOGV("%s:%d jpeg start: %p [%zu]", __FUNCTION__, __LINE__, mDstBuffer,
+          mDstBufferSize);
+  }
+
+  void termDestination() {
+    mEncodedSize = mDstBufferSize - mDestinationMgr.free_in_buffer;
+    ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, mEncodedSize);
+  }
+
+  // Perform actual compression.
+  //
+  // Takes vectors of pointers to Y / Cb / Cr scanlines as input. The length
+  // of each vector must match the height of the corresponding plane.
+  //
+  // Returns true if compression is successful, false otherwise.
+  bool compress(std::vector<JSAMPROW>& yLines, std::vector<JSAMPROW>& cbLines,
+                std::vector<JSAMPROW>& crLines) {
     jpeg_start_compress(&mCompressStruct, TRUE);
 
     while (mCompressStruct.next_scanline < mCompressStruct.image_height) {
@@ -149,23 +188,6 @@
     return mSuccess;
   }
 
- private:
-  void setSuccess(const boolean success) {
-    mSuccess = success;
-  }
-
-  void initDestination() {
-    mDestinationMgr.next_output_byte = reinterpret_cast<JOCTET*>(mDstBuffer);
-    mDestinationMgr.free_in_buffer = mDstBufferSize;
-    ALOGV("%s:%d jpeg start: %p [%zu]", __FUNCTION__, __LINE__, mDstBuffer,
-          mDstBufferSize);
-  }
-
-  void termDestination() {
-    mEncodedSize = mDstBufferSize - mDestinationMgr.free_in_buffer;
-    ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, mEncodedSize);
-  }
-
   // === libjpeg callbacks below ===
 
   static void onOutputError(j_common_ptr cinfo) {
@@ -195,9 +217,6 @@
   jpeg_error_mgr mErrorMgr;
   jpeg_destination_mgr mDestinationMgr;
 
-  // Layout of the input image.
-  android_ycbcr mYCbCr;
-
   // Dimensions of the input image.
   int mWidth;
   int mHeight;
@@ -216,11 +235,15 @@
 
 }  // namespace
 
-// Returns true if the EGL is in an error state and logs the error.
 bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
                   size_t outBufferSize, void* outBuffer) {
-  return LibJpegContext(width, height, ycbcr, outBufferSize, outBuffer)
-      .compress();
+  return LibJpegContext(width, height, outBufferSize, outBuffer).compress(ycbcr);
+}
+
+bool compressBlackJpeg(int width, int height, size_t outBufferSize,
+                       void* outBuffer) {
+  return LibJpegContext(width, height, outBufferSize, outBuffer)
+      .compressBlackImage();
 }
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/util/JpegUtil.h b/services/camera/virtualcamera/util/JpegUtil.h
index 8bff008..c44d0a8 100644
--- a/services/camera/virtualcamera/util/JpegUtil.h
+++ b/services/camera/virtualcamera/util/JpegUtil.h
@@ -27,9 +27,15 @@
 namespace virtualcamera {
 
 // Jpeg-compress image into the output buffer.
+// Returns true if the compression was successful, false otherwise.
 bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
                   size_t outBufferSize, void* outBuffer);
 
+// Jpeg-compress all-black image into the output buffer.
+// Returns true if the compression was successful, false otherwise.
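+//
+// Example usage (sizes and buffer capacity are illustrative):
+//
+//   std::vector<uint8_t> out(64 * 1024);
+//   bool ok = compressBlackJpeg(/*width=*/640, /*height=*/480,
+//                               out.size(), out.data());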
+bool compressBlackJpeg(int width, int height, size_t outBufferSize,
+                       void* outBuffer);
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.cc b/services/camera/virtualcamera/util/MetadataBuilder.cc
index b3b1a26..92a48b9 100644
--- a/services/camera/virtualcamera/util/MetadataBuilder.cc
+++ b/services/camera/virtualcamera/util/MetadataBuilder.cc
@@ -233,6 +233,12 @@
   return *this;
 }
 
+MetadataBuilder& MetadataBuilder::setControlZoomRatioRange(const float min,
+                                                           const float max) {
+  mEntryMap[ANDROID_CONTROL_ZOOM_RATIO_RANGE] = std::vector<float>({min, max});
+  return *this;
+}
+
 MetadataBuilder& MetadataBuilder::setSensorActiveArraySize(int x0, int y0,
                                                            int x1, int y1) {
   mEntryMap[ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE] =
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.h b/services/camera/virtualcamera/util/MetadataBuilder.h
index 2124398..d992d31 100644
--- a/services/camera/virtualcamera/util/MetadataBuilder.h
+++ b/services/camera/virtualcamera/util/MetadataBuilder.h
@@ -151,6 +151,9 @@
   // See ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM in CameraMetadataTag.aidl.
   MetadataBuilder& setAvailableMaxDigitalZoom(const float maxZoom);
 
+  // See ANDROID_CONTROL_ZOOM_RATIO_RANGE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlZoomRatioRange(float min, float max);
+
   // A list of all keys that the camera device has available to use with
   // CaptureRequest.
   //
diff --git a/services/oboeservice/AAudioClientTracker.h b/services/oboeservice/AAudioClientTracker.h
index cd3b75a..3d4ab34 100644
--- a/services/oboeservice/AAudioClientTracker.h
+++ b/services/oboeservice/AAudioClientTracker.h
@@ -110,7 +110,7 @@
     private:
         mutable std::mutex                              mLock;
         const pid_t                                     mProcessId;
-        std::set<android::sp<AAudioServiceStreamBase>>  mStreams;
+        std::set<android::sp<AAudioServiceStreamBase>>  mStreams GUARDED_BY(mLock);
         // hold onto binder to receive death notifications
         android::sp<IBinder>                            mBinder;
         bool                                            mExclusiveEnabled = true;
diff --git a/services/oboeservice/AAudioEndpointManager.h b/services/oboeservice/AAudioEndpointManager.h
index 1d38d26..cddc261 100644
--- a/services/oboeservice/AAudioEndpointManager.h
+++ b/services/oboeservice/AAudioEndpointManager.h
@@ -58,18 +58,25 @@
      * @param sharingMode
      * @return endpoint or null
      */
-    android::sp<AAudioServiceEndpoint> openEndpoint(android::AAudioService &audioService,
-                                        const aaudio::AAudioStreamRequest &request);
+    android::sp<AAudioServiceEndpoint> openEndpoint(
+            android::AAudioService &audioService,
+            const aaudio::AAudioStreamRequest &request)
+            EXCLUDES(mExclusiveLock, mSharedLock);
 
-    void closeEndpoint(const android::sp<AAudioServiceEndpoint>& serviceEndpoint);
+    void closeEndpoint(const android::sp<AAudioServiceEndpoint>& serviceEndpoint)
+            EXCLUDES(mExclusiveLock, mSharedLock);
 
 private:
-    android::sp<AAudioServiceEndpoint> openExclusiveEndpoint(android::AAudioService &aaudioService,
-                                                 const aaudio::AAudioStreamRequest &request,
-                                                 sp<AAudioServiceEndpoint> &endpointToSteal);
+    android::sp<AAudioServiceEndpoint> openExclusiveEndpoint(
+            android::AAudioService &aaudioService,
+            const aaudio::AAudioStreamRequest &request,
+            sp<AAudioServiceEndpoint> &endpointToSteal)
+            EXCLUDES(mExclusiveLock);
 
-    android::sp<AAudioServiceEndpoint> openSharedEndpoint(android::AAudioService &aaudioService,
-                                              const aaudio::AAudioStreamRequest &request);
+    android::sp<AAudioServiceEndpoint> openSharedEndpoint(
+            android::AAudioService &aaudioService,
+            const aaudio::AAudioStreamRequest &request)
+            EXCLUDES(mSharedLock);
 
     android::sp<AAudioServiceEndpoint> findExclusiveEndpoint_l(
             const AAudioStreamConfiguration& configuration)
@@ -77,7 +84,8 @@
 
     android::sp<AAudioServiceEndpointShared> findSharedEndpoint_l(
             const AAudioStreamConfiguration& configuration)
-            REQUIRES(mSharedLock);
+            REQUIRES(mSharedLock)
+            EXCLUDES(mExclusiveLock);
 
     void closeExclusiveEndpoint(const android::sp<AAudioServiceEndpoint>& serviceEndpoint);
     void closeSharedEndpoint(const android::sp<AAudioServiceEndpoint>& serviceEndpoint);
diff --git a/services/oboeservice/AAudioServiceEndpoint.h b/services/oboeservice/AAudioServiceEndpoint.h
index dff571b..6de95e0 100644
--- a/services/oboeservice/AAudioServiceEndpoint.h
+++ b/services/oboeservice/AAudioServiceEndpoint.h
@@ -55,9 +55,11 @@
      */
     virtual void close() = 0;
 
-    aaudio_result_t registerStream(const android::sp<AAudioServiceStreamBase>& stream);
+    aaudio_result_t registerStream(const android::sp<AAudioServiceStreamBase>& stream)
+            EXCLUDES(mLockStreams);
 
-    aaudio_result_t unregisterStream(const android::sp<AAudioServiceStreamBase>& stream);
+    aaudio_result_t unregisterStream(const android::sp<AAudioServiceStreamBase>& stream)
+            EXCLUDES(mLockStreams);
 
     virtual aaudio_result_t startStream(android::sp<AAudioServiceStreamBase> stream,
                                         audio_port_handle_t *clientHandle) = 0;
@@ -148,9 +150,11 @@
      * @param portHandle
      * @return return true if a stream with the given portHandle is registered
      */
-    bool                     isStreamRegistered(audio_port_handle_t portHandle);
+    bool                     isStreamRegistered(audio_port_handle_t portHandle)
+                                    EXCLUDES(mLockStreams);
 
-    std::vector<android::sp<AAudioServiceStreamBase>> disconnectRegisteredStreams();
+    std::vector<android::sp<AAudioServiceStreamBase>> disconnectRegisteredStreams()
+            EXCLUDES(mLockStreams);
 
     mutable std::mutex       mLockStreams;
     std::vector<android::sp<AAudioServiceStreamBase>> mRegisteredStreams
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index f19005c..eaa578c 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -19,7 +19,6 @@
 
 #include <atomic>
 #include <functional>
-#include <mutex>
 #include <vector>
 
 #include "client/AudioStreamInternal.h"
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.h b/services/oboeservice/AAudioServiceEndpointPlay.h
index 160a1de..704502e 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.h
+++ b/services/oboeservice/AAudioServiceEndpointPlay.h
@@ -19,7 +19,6 @@
 
 #include <atomic>
 #include <functional>
-#include <mutex>
 #include <vector>
 
 #include "client/AudioStreamInternal.h"
diff --git a/services/oboeservice/AAudioServiceEndpointShared.h b/services/oboeservice/AAudioServiceEndpointShared.h
index 0efb227..2c73953 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.h
+++ b/services/oboeservice/AAudioServiceEndpointShared.h
@@ -18,7 +18,6 @@
 #define AAUDIO_SERVICE_ENDPOINT_SHARED_H
 
 #include <atomic>
-#include <mutex>
 
 #include <android-base/thread_annotations.h>
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 8f51ce4..d5061b3 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -76,7 +76,8 @@
     /**
      * Open the device.
      */
-    virtual aaudio_result_t open(const aaudio::AAudioStreamRequest &request) = 0;
+    virtual aaudio_result_t open(const aaudio::AAudioStreamRequest &request)
+            EXCLUDES(mUpMessageQueueLock);
 
     // We log the CLOSE from the close() method. We needed this separate method to log the OPEN
     // because we had to wait until we generated the handle.
@@ -269,7 +270,8 @@
 
         AudioEndpointParcelable* mParcelable;
     };
-    aaudio_result_t getDescription_l(AudioEndpointParcelable* parcelable) REQUIRES(mLock);
+    aaudio_result_t getDescription_l(AudioEndpointParcelable* parcelable)
+            REQUIRES(mLock) EXCLUDES(mUpMessageQueueLock);
 
     void setState(aaudio_stream_state_t state);
 
@@ -279,7 +281,8 @@
      */
     virtual aaudio_result_t startDevice();
 
-    aaudio_result_t writeUpMessageQueue(AAudioServiceMessage *command);
+    aaudio_result_t writeUpMessageQueue(AAudioServiceMessage *command)
+            EXCLUDES(mUpMessageQueueLock);
 
     aaudio_result_t sendCurrentTimestamp_l() REQUIRES(mLock);
 
@@ -342,7 +345,7 @@
     pid_t                   mRegisteredClientThread = ILLEGAL_THREAD_ID;
 
     std::mutex              mUpMessageQueueLock;
-    std::shared_ptr<SharedRingBuffer> mUpMessageQueue;
+    std::shared_ptr<SharedRingBuffer> mUpMessageQueue PT_GUARDED_BY(mUpMessageQueueLock);
 
     enum : int32_t {
         START,
@@ -402,7 +405,7 @@
     /**
      * @return true if the queue is getting full.
      */
-    bool isUpMessageQueueBusy();
+    bool isUpMessageQueueBusy() EXCLUDES(mUpMessageQueueLock);
 
     aaudio_handle_t         mHandle = -1;
     bool                    mFlowing = false;
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index db3c8d0..42032d7 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -49,7 +49,8 @@
                             bool inService);
     ~AAudioServiceStreamMMAP() override = default;
 
-    aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
+    aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override
+            EXCLUDES(mUpMessageQueueLock);
 
     aaudio_result_t startClient(const android::AudioClient& client,
                                 const audio_attributes_t *attr,
diff --git a/services/oboeservice/AAudioServiceStreamShared.h b/services/oboeservice/AAudioServiceStreamShared.h
index 0b2513a..c6b74e1 100644
--- a/services/oboeservice/AAudioServiceStreamShared.h
+++ b/services/oboeservice/AAudioServiceStreamShared.h
@@ -50,7 +50,8 @@
 
     std::string dump() const override;
 
-    aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
+    aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override
+            EXCLUDES(mUpMessageQueueLock);
 
     void writeDataIfRoom(int64_t mmapFramesRead, const void *buffer, int32_t numFrames);
 
@@ -107,7 +108,7 @@
 
 private:
 
-    std::shared_ptr<SharedRingBuffer> mAudioDataQueue GUARDED_BY(audioDataQueueLock);
+    std::shared_ptr<SharedRingBuffer> mAudioDataQueue PT_GUARDED_BY(audioDataQueueLock);
 
     std::atomic<int64_t>     mTimestampPositionOffset;
     std::atomic<int32_t>     mXRunCount;
diff --git a/services/oboeservice/AAudioStreamTracker.h b/services/oboeservice/AAudioStreamTracker.h
index 99f4b6c..51a783d 100644
--- a/services/oboeservice/AAudioStreamTracker.h
+++ b/services/oboeservice/AAudioStreamTracker.h
@@ -37,7 +37,7 @@
      * @param streamHandle
      * @return number of streams removed
      */
-    int32_t removeStreamByHandle(aaudio_handle_t streamHandle);
+    int32_t removeStreamByHandle(aaudio_handle_t streamHandle) EXCLUDES(mHandleLock);
 
     /**
      * Look up a stream based on the handle.
@@ -46,7 +46,7 @@
      * @return strong pointer to the stream if found, or nullptr
      */
     android::sp<aaudio::AAudioServiceStreamBase> getStreamByHandle(
-            aaudio_handle_t streamHandle);
+            aaudio_handle_t streamHandle) EXCLUDES(mHandleLock);
 
     /**
      * Look up a stream based on the AudioPolicy portHandle.
@@ -56,7 +56,7 @@
      * @return strong pointer to the stream if found, or nullptr
      */
     android::sp<aaudio::AAudioServiceStreamBase> findStreamByPortHandle(
-            audio_port_handle_t portHandle);
+            audio_port_handle_t portHandle) EXCLUDES(mHandleLock);
 
     /**
      * Store a strong pointer to the stream and return a unique handle for future reference.
@@ -64,7 +64,8 @@
      * @param serviceStream
      * @return handle for identifying the stream
      */
-    aaudio_handle_t addStreamForHandle(const android::sp<AAudioServiceStreamBase>& serviceStream);
+    aaudio_handle_t addStreamForHandle(const android::sp<AAudioServiceStreamBase>& serviceStream)
+            EXCLUDES(mHandleLock);
 
     /**
      * @return string that can be added to dumpsys