Merge "Mute: Update logging" into main
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index 1832f94..3092091 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -29,20 +29,48 @@
 flag {
   name: "in_process_sw_audio_codec"
   namespace: "codec_fwk"
-  description: "Feature flag for in-process software audio codec support"
+  description: "Feature flag for in-process software audio codec API"
   bug: "297922713"
 }
 
 flag {
+  name: "in_process_sw_audio_codec_support"
+  namespace: "codec_fwk"
+  description: "Feature flag for in-process software audio codec support"
+  bug: "325520135"
+}
+
+flag {
+  name: "large_audio_frame_finish"
+  namespace: "codec_fwk"
+  description: "Implementation flag for large audio frame finishing tasks"
+  bug: "325512893"
+}
+
+flag {
   name: "null_output_surface"
   namespace: "codec_fwk"
-  description: "Feature flag for null output Surface support"
+  description: "Feature flag for null output Surface API"
   bug: "297920102"
 }
 
 flag {
+  name: "null_output_surface_support"
+  namespace: "codec_fwk"
+  description: "Feature flag for null output Surface support"
+  bug: "325550522"
+}
+
+flag {
   name: "region_of_interest"
   namespace: "codec_fwk"
-  description: "Feature flag for region of interest support"
+  description: "Feature flag for region of interest API"
   bug: "299191092"
 }
+
+flag {
+  name: "region_of_interest_support"
+  namespace: "codec_fwk"
+  description: "Feature flag for region of interest support"
+  bug: "325549730"
+}
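
The hunk above pairs each existing feature flag with a new "_support" flag: the original flag now gates the API surface ("... API"), while the "_support" flag tracks whether the framework implementation is ready ("... support"). A minimal sketch of gating a codepath on both flags follows; the helper name, generated header, and namespace below are assumptions, since the aconfig package declaration is outside this hunk.

    // Hypothetical sketch, not part of this change. Assumes aconfig-generated
    // C++ accessors for the codec_fwk flags; the real header and namespace
    // depend on the package declared in codec_fwk.aconfig.
    #include <com_android_media_codec_flags.h>  // assumed generated header

    namespace flags = com::android::media::codec::flags;  // assumed namespace

    bool canUseNullOutputSurface() {
        // The API flag exposes the feature to clients; the "_support" flag says
        // whether this build's implementation can actually honor it.
        return flags::null_output_surface() && flags::null_output_surface_support();
    }
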
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
index 47ceed5..a34cef1 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "codec2_hidl_hal_master_test"
 
 #include <android-base/logging.h>
+#include <android-base/properties.h>
 #include <gtest/gtest.h>
 #include <hidl/GtestPrinter.h>
 #include <hidl/ServiceManagement.h>
@@ -82,6 +83,20 @@
     }
 }
 
+TEST_P(Codec2MasterHalTest, MustUseAidlBeyond202404) {
+    static int sBoardFirstApiLevel = android::base::GetIntProperty("ro.board.first_api_level", 0);
+    static int sBoardApiLevel = android::base::GetIntProperty("ro.board.api_level", 0);
+    if (sBoardFirstApiLevel < 202404 && sBoardApiLevel < 202404) {
+        GTEST_SKIP() << "board API level is less than 202404:"
+                     << " ro.board.first_api_level = " << sBoardFirstApiLevel
+                     << " ro.board.api_level = " << sBoardApiLevel;
+    }
+    ALOGV("HidlCodecAllowed Test");
+
+    EXPECT_NE(mClient->getAidlBase(), nullptr) << "android.hardware.media.c2 MUST use AIDL "
+                                               << "for chipsets launching at 202404 or above";
+}
+
 }  // anonymous namespace
 
 INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2MasterHalTest,
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 565427b..98a1fde 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1699,29 +1699,42 @@
 }
 
 status_t AudioTrack::setOutputDevice(audio_port_handle_t deviceId) {
+    status_t result = NO_ERROR;
     AutoMutex lock(mLock);
-    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d mRoutedDeviceId %d",
-            __func__, mPortId, deviceId, mSelectedDeviceId, mRoutedDeviceId);
+    ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
+            __func__, mPortId, deviceId, mSelectedDeviceId);
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
         if (mStatus == NO_ERROR) {
-            // allow track invalidation when track is not playing to propagate
-            // the updated mSelectedDeviceId
-            if (isPlaying_l()) {
-                if (mSelectedDeviceId != mRoutedDeviceId) {
-                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
-                    mProxy->interrupt();
+            if (isOffloadedOrDirect_l()) {
+                if (mState == STATE_STOPPED || mState == STATE_FLUSHED) {
+                    ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
+                    result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
+                } else {
+                    ALOGW("%s(%d). Offloaded or Direct track is not STOPPED or FLUSHED. "
+                          "State: %s.",
+                            __func__, mPortId, stateToString(mState));
+                    result = INVALID_OPERATION;
                 }
             } else {
-                // if the track is idle, try to restore now and
-                // defer to next start if not possible
-                if (restoreTrack_l("setOutputDevice") != OK) {
-                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                // allow track invalidation when track is not playing to propagate
+                // the updated mSelectedDeviceId
+                if (isPlaying_l()) {
+                    if (mSelectedDeviceId != mRoutedDeviceId) {
+                        android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                        mProxy->interrupt();
+                    }
+                } else {
+                    // if the track is idle, try to restore now and
+                    // defer to next start if not possible
+                    if (restoreTrack_l("setOutputDevice") != OK) {
+                        android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                    }
                 }
             }
         }
     }
-    return NO_ERROR;
+    return result;
 }
 
 audio_port_handle_t AudioTrack::getOutputDevice() {
@@ -2836,7 +2849,7 @@
     return 0;
 }
 
-status_t AudioTrack::restoreTrack_l(const char *from)
+status_t AudioTrack::restoreTrack_l(const char *from, bool forceRestore)
 {
     status_t result = NO_ERROR;  // logged: make sure to set this before returning.
     const int64_t beginNs = systemTime();
@@ -2857,7 +2870,8 @@
     // output parameters and new IAudioFlinger in createTrack_l()
     AudioSystem::clearAudioConfigCache();
 
-    if (isOffloadedOrDirect_l() || mDoNotReconnect) {
+    if (!forceRestore &&
+        (isOffloadedOrDirect_l() || mDoNotReconnect)) {
         // FIXME re-creation of offloaded and direct tracks is not yet implemented;
         // Disabled since (1) timestamp correction is not implemented for non-PCM and
         // (2) We pre-empt existing direct tracks on resource constraint, so these tracks
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 523383f..19780ae 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1220,7 +1220,7 @@
             void setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount);
 
             // FIXME enum is faster than strcmp() for parameter 'from'
-            status_t restoreTrack_l(const char *from);
+            status_t restoreTrack_l(const char *from, bool forceRestore = false);
 
             uint32_t    getUnderrunCount_l() const;
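
With the AudioTrack changes above, setOutputDevice() on an offloaded or direct track only takes effect while the track is STOPPED or FLUSHED, in which case the track is force-recreated via restoreTrack_l(..., forceRestore = true); while playing, the call now returns INVALID_OPERATION instead of unconditionally reporting NO_ERROR. A caller-side sketch of the implied sequence follows; rerouteOffloadedTrack is an illustrative helper, not part of this change.

    // Sketch of the caller-side pattern implied by the change above; assumes an
    // already-created offloaded/direct AudioTrack and a valid target port handle.
    #include <media/AudioTrack.h>

    android::status_t rerouteOffloadedTrack(const android::sp<android::AudioTrack>& track,
                                            audio_port_handle_t newDeviceId) {
        track->stop();                  // offloaded stop() may keep draining
        track->flush();                 // force the track into the FLUSHED state
        android::status_t res = track->setOutputDevice(newDeviceId);  // recreates the track
        if (res != android::NO_ERROR) {
            return res;                 // e.g. INVALID_OPERATION if still playing
        }
        return track->start();
    }
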
 
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index b3697bf..14765f6b 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -274,15 +274,16 @@
     return parseAndGetVendorParameters(mVendorExt, mModule, parameterKeys, values);
 }
 
-status_t DeviceHalAidl::getInputBufferSize(const struct audio_config* config, size_t* size) {
+status_t DeviceHalAidl::getInputBufferSize(struct audio_config* config, size_t* size) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (mModule == nullptr) return NO_INIT;
     if (config == nullptr || size == nullptr) {
         return BAD_VALUE;
     }
+    constexpr bool isInput = true;
     AudioConfig aidlConfig = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, true /*isInput*/));
+            ::aidl::android::legacy2aidl_audio_config_t_AudioConfig(*config, isInput));
     AudioDevice aidlDevice;
     aidlDevice.type.type = AudioDeviceType::IN_DEFAULT;
     AudioSource aidlSource = AudioSource::DEFAULT;
@@ -296,6 +297,9 @@
                         0 /*handle*/, aidlDevice, aidlFlags, aidlSource,
                         &cleanups, &aidlConfig, &mixPortConfig, &aidlPatch));
     }
+    *config = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::aidl2legacy_AudioConfig_audio_config_t(aidlConfig, isInput));
+    if (mixPortConfig.id == 0) return BAD_VALUE;  // HAL suggests a different config.
     *size = aidlConfig.frameCount *
             getFrameSizeInBytes(aidlConfig.base.format, aidlConfig.base.channelMask);
     // Do not disarm cleanups to release temporary port configs.
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index f705db7..d925b46 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -113,7 +113,7 @@
     status_t getParameters(const String8& keys, String8 *values) override;
 
     // Returns audio input buffer size according to parameters passed.
-    status_t getInputBufferSize(const struct audio_config* config, size_t* size) override;
+    status_t getInputBufferSize(struct audio_config* config, size_t* size) override;
 
     // Creates and opens the audio hardware output stream. The stream is closed
     // by releasing all references to the returned object.
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index e8e1f46..478e0f0 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -236,7 +236,7 @@
 }
 
 status_t DeviceHalHidl::getInputBufferSize(
-        const struct audio_config *config, size_t *size) {
+        struct audio_config *config, size_t *size) {
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     AudioConfig hidlConfig;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 7a712df..1362dab 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -67,7 +67,7 @@
     status_t getParameters(const String8& keys, String8 *values) override;
 
     // Returns audio input buffer size according to parameters passed.
-    status_t getInputBufferSize(const struct audio_config* config, size_t* size) override;
+    status_t getInputBufferSize(struct audio_config* config, size_t* size) override;
 
     // Creates and opens the audio hardware output stream. The stream is closed
     // by releasing all references to the returned object.
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index ebda86a..b1b1dfe 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -53,6 +53,7 @@
 #include "effectsAidlConversion/AidlConversionVisualizer.h"
 
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::CommandId;
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::IFactory;
@@ -285,6 +286,7 @@
 
 status_t EffectHalAidl::close() {
     TIME_CHECK();
+    mEffect->command(CommandId::STOP);
     return statusTFromBinderStatus(mEffect->close());
 }
 
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index bb5f851..7f6c1fb 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -78,8 +78,9 @@
     virtual status_t getParameters(const String8& keys, String8 *values) = 0;
 
     // Returns audio input buffer size according to parameters passed.
-    virtual status_t getInputBufferSize(const struct audio_config *config,
-            size_t *size) = 0;
+    // If the HAL cannot open an input with the provided parameters, the method
+    // returns BAD_VALUE and updates the provided `config` with a suggested configuration.
+    virtual status_t getInputBufferSize(struct audio_config *config, size_t *size) = 0;
 
     // Creates and opens the audio hardware output stream. The stream is closed
     // by releasing all references to the returned object.
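
With the new contract, a caller can treat BAD_VALUE as "here is a configuration I could open instead" and retry once with the suggested config, which is exactly what the AudioFlinger change later in this patch does. A condensed sketch of that retry follows; queryInputBufferSize is an illustrative helper, not part of this change.

    // Sketch of the caller-side retry implied by the contract above; mirrors
    // the AudioFlinger::getInputBufferSize() change later in this patch.
    #include <media/audiohal/DeviceHalInterface.h>

    android::status_t queryInputBufferSize(
            const android::sp<android::DeviceHalInterface>& dev,
            audio_config_t* config, size_t* bytes) {
        android::status_t result = dev->getInputBufferSize(config, bytes);
        if (result == android::BAD_VALUE) {
            // The HAL rejected the request but wrote a suggested config back
            // into *config; retry once with that suggestion.
            result = dev->getInputBufferSize(config, bytes);
        }
        return result;
    }
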
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index a9d0cc2..aa18deb 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -284,15 +284,15 @@
 
         // roundoff
         int maxLevelRound = (int)(totalEnergyEstimation + 0.99);
-        if (maxLevelRound + mVolume > 0) {
-            gainCorrection = maxLevelRound + mVolume;
+        if (maxLevelRound + mVolumedB > 0) {
+            gainCorrection = maxLevelRound + mVolumedB;
         }
 
-        params.VC_EffectLevel = mVolume - gainCorrection;
+        params.VC_EffectLevel = mVolumedB - gainCorrection;
         if (params.VC_EffectLevel < -96) {
             params.VC_EffectLevel = -96;
         }
-        LOG(INFO) << "\tVol: " << mVolume << ", GainCorrection: " << gainCorrection
+        LOG(INFO) << "\tVol: " << mVolumedB << ", GainCorrection: " << gainCorrection
                   << ", Actual vol: " << params.VC_EffectLevel;
 
         /* Activate the initial settings */
@@ -576,25 +576,25 @@
 
 RetCode BundleContext::setVolumeLevel(float level) {
     if (mMuteEnabled) {
-        mLevelSaved = level;
+        mLevelSaveddB = level;
     } else {
-        mVolume = level;
+        mVolumedB = level;
     }
     LOG(INFO) << __func__ << " success with level " << level;
     return limitLevel();
 }
 
 float BundleContext::getVolumeLevel() const {
-    return (mMuteEnabled ? mLevelSaved : mVolume);
+    return (mMuteEnabled ? mLevelSaveddB : mVolumedB);
 }
 
 RetCode BundleContext::setVolumeMute(bool mute) {
     mMuteEnabled = mute;
     if (mMuteEnabled) {
-        mLevelSaved = mVolume;
-        mVolume = -96;
+        mLevelSaveddB = mVolumedB;
+        mVolumedB = -96;
     } else {
-        mVolume = mLevelSaved;
+        mVolumedB = mLevelSaveddB;
     }
     return limitLevel();
 }
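
The rename to mVolumedB / mLevelSaveddB makes the unit explicit: the bundle stores the level in dB, and muting parks the internal level at -96 dB while the caller-visible level is preserved. A short usage sketch of those semantics follows; it is illustrative only (not a test shipped with this change), and the namespace qualification of BundleContext is elided.

    // Illustrative sketch of the mute save/restore semantics implemented above.
    // `context` is an already-initialized BundleContext (construction elided).
    void illustrateMuteSemantics(BundleContext& context) {
        context.setVolumeLevel(-24.0f);             // stored in mVolumedB
        context.setVolumeMute(true);                // saves -24 dB, forces -96 dB internally
        float reported = context.getVolumeLevel();  // still -24 dB: the saved level is reported
        context.setVolumeMute(false);               // restores mVolumedB to -24 dB
        (void)reported;
    }
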
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index af46818..d823030 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -124,8 +124,8 @@
     bool mVirtualizerTempDisabled = false;
     ::aidl::android::media::audio::common::AudioDeviceDescription mForceDevice;
     // Volume
-    float mLevelSaved = 0; /* for when mute is set, level must be saved */
-    float mVolume = 0;
+    float mLevelSaveddB = 0; /* for when mute is set, level must be saved */
+    float mVolumedB = 0;
     bool mMuteEnabled = false; /* Must store as mute = -96dB level */
 
     RetCode initControlParameter(LVM_ControlParams_t& params) const;
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
index 143329d..daabdb7 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
@@ -129,8 +129,7 @@
                    .implementor = "NXP Software Ltd."},
         .capability = kVirtualizerCap};
 
-static const std::vector<Range::VolumeRange> kVolumeRanges = {
-        MAKE_RANGE(Volume, levelDb, -9600, 0)};
+static const std::vector<Range::VolumeRange> kVolumeRanges = {MAKE_RANGE(Volume, levelDb, -96, 0)};
 static const Capability kVolumeCap = {.range = kVolumeRanges};
 static const std::string kVolumeEffectName = "Volume";
 static const Descriptor kVolumeDesc = {
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index fa651a6..9b1bac6 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -59,7 +59,8 @@
 const std::string VisualizerImpl::kEffectName = "Visualizer";
 const std::vector<Range::VisualizerRange> VisualizerImpl::kRanges = {
         MAKE_RANGE(Visualizer, latencyMs, 0, VisualizerContext::kMaxLatencyMs),
-        MAKE_RANGE(Visualizer, captureSamples, 0, VisualizerContext::kMaxCaptureBufSize),
+        MAKE_RANGE(Visualizer, captureSamples, VisualizerContext::kMinCaptureBufSize,
+                   VisualizerContext::kMaxCaptureBufSize),
         /* get only parameters, set invalid range (min > max) to indicate not support set */
         MAKE_RANGE(Visualizer, measurement, Visualizer::Measurement({.rms = 1, .peak = 1}),
                    Visualizer::Measurement({.rms = 0, .peak = 0})),
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
index 5d2bb3a..c763b1a 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.cpp
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -93,7 +93,7 @@
     mCaptureSamples = samples;
     return RetCode::SUCCESS;
 }
-int VisualizerContext::getCaptureSamples() {
+int32_t VisualizerContext::getCaptureSamples() {
     std::lock_guard lg(mMutex);
     return mCaptureSamples;
 }
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.h b/media/libeffects/visualizer/aidl/VisualizerContext.h
index 958035f..b03e038 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.h
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.h
@@ -18,6 +18,7 @@
 
 #include <android-base/thread_annotations.h>
 #include <audio_effects/effect_dynamicsprocessing.h>
+#include <system/audio_effects/effect_visualizer.h>
 
 #include "effect-impl/EffectContext.h"
 
@@ -25,8 +26,11 @@
 
 class VisualizerContext final : public EffectContext {
   public:
-    static const uint32_t kMaxCaptureBufSize = 65536;
-    static const uint32_t kMaxLatencyMs = 3000;  // 3 seconds of latency for audio pipeline
+    // Need to align the min/max capture size to VISUALIZER_CAPTURE_SIZE_MIN and
+    // VISUALIZER_CAPTURE_SIZE_MAX because of a limitation in audio_utils fixedfft.
+    static constexpr int32_t kMinCaptureBufSize = VISUALIZER_CAPTURE_SIZE_MIN;
+    static constexpr int32_t kMaxCaptureBufSize = VISUALIZER_CAPTURE_SIZE_MAX;
+    static constexpr uint32_t kMaxLatencyMs = 3000;  // 3 seconds of latency for audio pipeline
 
     VisualizerContext(int statusDepth, const Parameter::Common& common);
     ~VisualizerContext();
@@ -38,8 +42,8 @@
     // keep all parameters and reset buffer.
     void reset();
 
-    RetCode setCaptureSamples(int captureSize);
-    int getCaptureSamples();
+    RetCode setCaptureSamples(int32_t captureSize);
+    int32_t getCaptureSamples();
     RetCode setMeasurementMode(Visualizer::MeasurementMode mode);
     Visualizer::MeasurementMode getMeasurementMode();
     RetCode setScalingMode(Visualizer::ScalingMode mode);
@@ -86,7 +90,7 @@
     // capture buf with 8 bits mono PCM samples
     std::array<uint8_t, kMaxCaptureBufSize> mCaptureBuf GUARDED_BY(mMutex);
     uint32_t mDownstreamLatency GUARDED_BY(mMutex) = 0;
-    uint32_t mCaptureSamples GUARDED_BY(mMutex) = kMaxCaptureBufSize;
+    int32_t mCaptureSamples GUARDED_BY(mMutex) = kMaxCaptureBufSize;
 
     // to avoid recomputing it every time a buffer is processed
     uint8_t mChannelCount GUARDED_BY(mMutex) = 0;
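
The capture size is now bounded by the VISUALIZER_CAPTURE_SIZE_MIN/MAX constants from effect_visualizer.h rather than an ad-hoc 65536 maximum, matching the limitation in audio_utils fixedfft noted above. A small validation sketch follows; isValidCaptureSize is illustrative, and the power-of-two check is an assumption based on the public Visualizer API's capture-size requirements rather than something this header documents.

    // Sketch: validate a requested capture size against the new bounds.
    #include <cstdint>
    #include <system/audio_effects/effect_visualizer.h>

    bool isValidCaptureSize(int32_t samples) {
        if (samples < VISUALIZER_CAPTURE_SIZE_MIN ||
            samples > VISUALIZER_CAPTURE_SIZE_MAX) {
            return false;
        }
        // Capture sizes are powers of two (assumption, per the Visualizer API).
        return (samples & (samples - 1)) == 0;
    }
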
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index d50c06b..a18dbfe 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -1640,6 +1640,11 @@
     ALOGV("buffer->range_length:%lld", (long long)buffer->range_length());
     if (buffer->meta_data().findInt64(kKeySampleFileOffset, &offset)) {
         ALOGV("offset:%lld, old_offset:%lld", (long long)offset, (long long)old_offset);
+        if (mMaxOffsetAppend > offset) {
+            // This has already been appended, skip updating mOffset value.
+            *bytesWritten = buffer->range_length();
+            return offset;
+        }
         if (old_offset == offset) {
             mOffset += buffer->range_length();
         } else {
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index e7b4fe7..725e5a6 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1957,7 +1957,7 @@
     mHardwareStatus = AUDIO_HW_IDLE;
 
     // Change parameters of the configuration each iteration until we find a
-    // configuration that the device will support.
+    // configuration that the device will support, or the HAL suggests one that it does.
     audio_config_t config = AUDIO_CONFIG_INITIALIZER;
     for (auto testChannelMask : channelMasks) {
         config.channel_mask = testChannelMask;
@@ -1967,11 +1967,16 @@
                 config.sample_rate = testSampleRate;
 
                 size_t bytes = 0;
+                audio_config_t loopConfig = config;
                 status_t result = dev->getInputBufferSize(&config, &bytes);
+                if (result == BAD_VALUE) {
+                    // Retry with the config suggested by the HAL.
+                    result = dev->getInputBufferSize(&config, &bytes);
+                }
                 if (result != OK || bytes == 0) {
+                    config = loopConfig;
                     continue;
                 }
-
                 if (config.sample_rate != sampleRate || config.channel_mask != channelMask ||
                     config.format != format) {
                     uint32_t dstChannelCount = audio_channel_count_from_in_mask(channelMask);
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 8d40b63..3b764d1 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -231,10 +231,12 @@
         ALOGI("%s: could not find port id for device %s", __func__, adt.toString().c_str());
         return AUDIO_PORT_HANDLE_NONE;
     }
-    const auto btDeviceIt = mBluetoothDevicesWithCsd.find(std::make_pair(address, type));
-    if (btDeviceIt != mBluetoothDevicesWithCsd.end()) {
-        if (!btDeviceIt->second) {
-            ALOGI("%s: bt device %s does not support sound dose", __func__, adt.toString().c_str());
+
+    if (audio_is_ble_out_device(type) || audio_is_a2dp_device(type)) {
+        const auto btDeviceIt = mBluetoothDevicesWithCsd.find(std::make_pair(address, type));
+        if (btDeviceIt == mBluetoothDevicesWithCsd.end() || !btDeviceIt->second) {
+            ALOGI("%s: bt device %s does not support sound dose", __func__,
+                  adt.toString().c_str());
             return AUDIO_PORT_HANDLE_NONE;
         }
     }
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index f2d1414..14e5fad 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -17,12 +17,13 @@
 #define LOG_TAG "AidlUtils"
 
 #include <aidl/AidlUtils.h>
+#include <aidl/ExtensionMetadataTags.h>
 #include <aidl/VndkVersionMetadataTags.h>
 #include <aidlcommonsupport/NativeHandle.h>
+#include <camera/StringUtils.h>
 #include <device3/Camera3StreamInterface.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 #include <mediautils/AImageReaderUtils.h>
-#include <camera/StringUtils.h>
 
 namespace android::hardware::cameraservice::utils::conversion::aidl {
 
@@ -333,4 +334,47 @@
     return OK;
 }
 
+bool areExtensionKeysSupported(const CameraMetadata& metadata) {
+    auto requestKeys = metadata.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+    if (requestKeys.count == 0) {
+        ALOGE("%s: No ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS entries!", __FUNCTION__);
+        return false;
+    }
+
+    auto resultKeys = metadata.find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
+    if (resultKeys.count == 0) {
+        ALOGE("%s: No ANDROID_REQUEST_AVAILABLE_RESULT_KEYS entries!", __FUNCTION__);
+        return false;
+    }
+
+    for (const auto& extensionKey : extension_metadata_keys) {
+        if (std::find(requestKeys.data.i32, requestKeys.data.i32 + requestKeys.count, extensionKey)
+                != requestKeys.data.i32 + requestKeys.count) {
+            return true;
+        }
+
+        if (std::find(resultKeys.data.i32, resultKeys.data.i32 + resultKeys.count, extensionKey)
+                != resultKeys.data.i32 + resultKeys.count) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+status_t filterExtensionKeys(CameraMetadata* metadata /*out*/) {
+    if (metadata == nullptr) {
+        return BAD_VALUE;
+    }
+
+    for (const auto& key : extension_metadata_keys) {
+        status_t res = metadata->erase(key);
+        if (res != OK) {
+            ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
+            return res;
+        }
+    }
+    return OK;
+}
+
 } // namespace android::hardware::cameraservice::utils::conversion::aidl
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.h b/services/camera/libcameraservice/aidl/AidlUtils.h
index c89d7ff..562aa70 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.h
+++ b/services/camera/libcameraservice/aidl/AidlUtils.h
@@ -122,6 +122,9 @@
 
 status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic = true);
 
+bool areExtensionKeysSupported(const CameraMetadata& metadata);
+
+status_t filterExtensionKeys(CameraMetadata* metadata /*out*/);
 } // namespace android::hardware::cameraservice::utils::conversion::aidl
 
 #endif  // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
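
The two helpers are meant to be used together, as the Camera3Device hunks later in this patch do: probe the static camera characteristics once with areExtensionKeysSupported(), then strip extension tags from each outgoing request with filterExtensionKeys() when the HAL never advertised them. A condensed sketch of that wiring follows; the function and variable names are illustrative.

    // Condensed sketch of the intended usage; the real wiring lives in the
    // Camera3Device changes later in this patch.
    using namespace android::hardware::cameraservice::utils::conversion::aidl;

    // At device initialization: remember whether the HAL advertises extension keys.
    bool probeExtensionSupport(const android::CameraMetadata& staticInfo) {
        return areExtensionKeysSupported(staticInfo);
    }

    // Per capture request: drop extension tags the HAL never advertised.
    android::status_t sanitizeRequest(bool supportsExtensionKeys,
                                      android::CameraMetadata* requestMetadata) {
        if (!supportsExtensionKeys) {
            return filterExtensionKeys(requestMetadata);
        }
        return android::OK;
    }
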
diff --git a/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
new file mode 100644
index 0000000..86af36c
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+#include <vector>
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from extensions_camera_metadata_tags.mako. To be included in libcameraservice
+ * only by aidl/AidlUtils.cpp.
+ */
+
+/**
+ * Extension metadata keys. Used to filter out extension-specific tags for
+ * camera clients whose HAL does not advertise them.
+ */
+std::vector<camera_metadata_tag> extension_metadata_keys{
+            ANDROID_EXTENSION_STRENGTH,
+            ANDROID_EXTENSION_CURRENT_TYPE,
+            ANDROID_EFV_PADDING_ZOOM_FACTOR,
+            ANDROID_EFV_AUTO_ZOOM,
+            ANDROID_EFV_MAX_PADDING_ZOOM_FACTOR,
+            ANDROID_EFV_STABILIZATION_MODE,
+            ANDROID_EFV_TRANSLATE_VIEWPORT,
+            ANDROID_EFV_ROTATE_VIEWPORT,
+            ANDROID_EFV_PADDING_REGION,
+            ANDROID_EFV_AUTO_ZOOM_PADDING_REGION,
+            ANDROID_EFV_TARGET_COORDINATES,
+};
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index c0a0544..9792089 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -78,6 +78,7 @@
 using namespace android::camera3;
 using namespace android::camera3::SessionConfigurationUtils;
 using namespace android::hardware::camera;
+using namespace android::hardware::cameraservice::utils::conversion::aidl;
 
 namespace flags = com::android::internal::camera::flags;
 namespace android {
@@ -254,6 +255,8 @@
         return res;
     }
 
+    mSupportsExtensionKeys = areExtensionKeysSupported(mDeviceInfo);
+
     return OK;
 }
 
@@ -2531,10 +2534,13 @@
 
     config.streams = streams.editArray();
     config.hal_buffer_managed_streams = mHalBufManagedStreamIds;
+    config.use_hal_buf_manager = mUseHalBufManager;
 
     // Do the HAL configuration; will potentially touch stream
-    // max_buffers, usage, and priv fields, as well as data_space and format
-    // fields for IMPLEMENTATION_DEFINED formats.
+    // max_buffers, usage, and priv fields, as well as data_space and format
+    // fields for IMPLEMENTATION_DEFINED formats. It may also update the HAL buffer
+    // managed streams and use_hal_buf_manager (when the session_hal_buf_manager
+    // aconfig flag is not enabled but the HAL supports a session-specific buffer manager).
 
     int64_t logId = mCameraServiceProxyWrapper->getCurrentLogIdForCamera(mId);
     const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
@@ -2554,6 +2560,8 @@
                 strerror(-res), res);
         return res;
     }
+    // The HAL buffer manager behavior may have been changed by the
+    // configureStreams call.
     mUseHalBufManager = config.use_hal_buf_manager;
     if (flags::session_hal_buf_manager()) {
         bool prevSessionHalBufManager = (mHalBufManagedStreamIds.size() != 0);
@@ -3887,13 +3895,22 @@
 
                     for (it = captureRequest->mSettingsList.begin();
                             it != captureRequest->mSettingsList.end(); it++) {
-                        res = hardware::cameraservice::utils::conversion::aidl::filterVndkKeys(
-                                mVndkVersion, it->metadata, false /*isStatic*/);
+                        res = filterVndkKeys(mVndkVersion, it->metadata, false /*isStatic*/);
                         if (res != OK) {
                             SET_ERR("RequestThread: Failed during VNDK filter of capture requests "
                                     "%d: %s (%d)", halRequest->frame_number, strerror(-res), res);
                             return INVALID_OPERATION;
                         }
+
+                        if (!parent->mSupportsExtensionKeys) {
+                            res = filterExtensionKeys(&it->metadata);
+                            if (res != OK) {
+                                SET_ERR("RequestThread: Failed during extension filter of capture "
+                                        "requests %d: %s (%d)", halRequest->frame_number,
+                                        strerror(-res), res);
+                                return INVALID_OPERATION;
+                            }
+                        }
                     }
                 }
             }
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 498ef55..ac4b039 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -1523,6 +1523,9 @@
     // AE_TARGET_FPS_RANGE
     bool mIsFixedFps = false;
 
+    // Whether the HAL advertises extension related metadata (if not, such keys are filtered)
+    bool mSupportsExtensionKeys = false;
+
     // Injection camera related methods.
     class Camera3DeviceInjectionMethods : public virtual RefBase {
       public:
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 13c500f..e8ef692 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -913,7 +913,6 @@
     std::set<int> activeStreams;
     camera::device::StreamConfiguration requestedConfiguration;
     requestedConfiguration.streams.resize(config->num_streams);
-    config->use_hal_buf_manager = mUseHalBufManager;
     for (size_t i = 0; i < config->num_streams; i++) {
         camera::device::Stream &dst = requestedConfiguration.streams[i];
         camera3::camera_stream_t *src = config->streams[i];
diff --git a/services/camera/virtualcamera/Android.bp b/services/camera/virtualcamera/Android.bp
index 74b2e2c..90530f6 100644
--- a/services/camera/virtualcamera/Android.bp
+++ b/services/camera/virtualcamera/Android.bp
@@ -12,6 +12,7 @@
         "libbinder",
         "libbinder_ndk",
         "libcamera_metadata",
+        "libexif",
         "liblog",
         "libfmq",
         "libgui",
@@ -47,7 +48,7 @@
     name: "libvirtualcamera_utils",
     srcs: [
         "util/JpegUtil.cc",
-        "util/MetadataBuilder.cc",
+        "util/MetadataUtil.cc",
         "util/Util.cc",
         "util/TestPatternHelper.cc",
         "util/EglDisplayContext.cc",
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index 947b355..a70f6cf 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -38,7 +38,7 @@
 #include "android/binder_status.h"
 #include "log/log.h"
 #include "system/camera_metadata.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 #include "util/Util.h"
 
 namespace android {
@@ -81,10 +81,45 @@
 
 constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};
 
+constexpr float kAspectRatioEpsilon = 0.05;
+
+const std::array<Resolution, 5> kStandardJpegThumbnailSizes{
+    Resolution(176, 144), Resolution(240, 144), Resolution(256, 144),
+    Resolution(240, 160), Resolution(240, 180)};
+
 const std::array<PixelFormat, 3> kOutputFormats{
     PixelFormat::IMPLEMENTATION_DEFINED, PixelFormat::YCBCR_420_888,
     PixelFormat::BLOB};
 
+bool isApproximatelySameAspectRatio(const Resolution r1, const Resolution r2) {
+  float aspectRatio1 =
+      static_cast<float>(r1.width) / static_cast<float>(r1.height);
+  float aspectRatio2 =
+      static_cast<float>(r2.width) / static_cast<float>(r2.height);
+
+  return std::abs(aspectRatio1 - aspectRatio2) < kAspectRatioEpsilon;
+}
+
+std::vector<Resolution> getSupportedJpegThumbnailSizes(
+    const std::vector<SupportedStreamConfiguration>& configs) {
+  auto isSupportedByAnyInputConfig =
+      [&configs](const Resolution thumbnailResolution) {
+        return std::any_of(
+            configs.begin(), configs.end(),
+            [thumbnailResolution](const SupportedStreamConfiguration& config) {
+              return isApproximatelySameAspectRatio(
+                  thumbnailResolution, Resolution(config.width, config.height));
+            });
+      };
+
+  std::vector<Resolution> supportedThumbnailSizes({Resolution(0, 0)});
+  std::copy_if(kStandardJpegThumbnailSizes.begin(),
+               kStandardJpegThumbnailSizes.end(),
+               std::back_insert_iterator(supportedThumbnailSizes),
+               isSupportedByAnyInputConfig);
+  return supportedThumbnailSizes;
+}
+
 bool isSupportedOutputFormat(const PixelFormat pixelFormat) {
   return std::find(kOutputFormats.begin(), kOutputFormats.end(), pixelFormat) !=
          kOutputFormats.end();
@@ -199,8 +234,8 @@
           .setControlAeLockAvailable(false)
           .setControlAvailableAwbModes({ANDROID_CONTROL_AWB_MODE_AUTO})
           .setControlZoomRatioRange(/*min=*/1.0, /*max=*/1.0)
-          // TODO(b/301023410) Add JPEG Exif + thumbnail support.
-          .setJpegAvailableThumbnailSizes({Resolution(0, 0)})
+          .setJpegAvailableThumbnailSizes(
+              getSupportedJpegThumbnailSizes(supportedInputConfig))
           .setMaxJpegSize(kMaxJpegSize)
           .setMaxFrameDuration(kMaxFrameDuration)
           .setMaxNumberOutputStreams(
@@ -209,24 +244,34 @@
               VirtualCameraDevice::kMaxNumberOfStallStreams)
           .setPipelineMaxDepth(kPipelineMaxDepth)
           .setSyncMaxLatency(ANDROID_SYNC_MAX_LATENCY_UNKNOWN)
-          .setAvailableRequestKeys(
-              {ANDROID_CONTROL_CAPTURE_INTENT, ANDROID_CONTROL_AE_MODE,
-               ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
-               ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
-               ANDROID_CONTROL_AE_ANTIBANDING_MODE,
-               ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_TRIGGER,
-               ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AWB_MODE,
-               ANDROID_SCALER_CROP_REGION, ANDROID_CONTROL_EFFECT_MODE,
-               ANDROID_CONTROL_MODE, ANDROID_CONTROL_SCENE_MODE,
-               ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
-               ANDROID_CONTROL_ZOOM_RATIO, ANDROID_STATISTICS_FACE_DETECT_MODE,
-               ANDROID_FLASH_MODE})
+          .setAvailableRequestKeys({ANDROID_CONTROL_CAPTURE_INTENT,
+                                    ANDROID_CONTROL_AE_MODE,
+                                    ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+                                    ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                                    ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+                                    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+                                    ANDROID_CONTROL_AF_TRIGGER,
+                                    ANDROID_CONTROL_AF_MODE,
+                                    ANDROID_CONTROL_AWB_MODE,
+                                    ANDROID_SCALER_CROP_REGION,
+                                    ANDROID_CONTROL_EFFECT_MODE,
+                                    ANDROID_CONTROL_MODE,
+                                    ANDROID_CONTROL_SCENE_MODE,
+                                    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+                                    ANDROID_CONTROL_ZOOM_RATIO,
+                                    ANDROID_STATISTICS_FACE_DETECT_MODE,
+                                    ANDROID_FLASH_MODE,
+                                    ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+                                    ANDROID_JPEG_QUALITY,
+                                    ANDROID_JPEG_THUMBNAIL_QUALITY})
           .setAvailableResultKeys(
               {ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AWB_MODE,
                ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
                ANDROID_FLASH_MODE, ANDROID_FLASH_STATE,
-               ANDROID_SENSOR_TIMESTAMP, ANDROID_LENS_FOCAL_LENGTH})
+               ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, ANDROID_JPEG_QUALITY,
+               ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_SENSOR_TIMESTAMP,
+               ANDROID_LENS_FOCAL_LENGTH})
           .setAvailableCapabilities(
               {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE});
 
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.h b/services/camera/virtualcamera/VirtualCameraDevice.h
index 720f02e..c274dc9 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.h
+++ b/services/camera/virtualcamera/VirtualCameraDevice.h
@@ -103,20 +103,23 @@
   Resolution getMaxInputResolution() const;
 
   // Maximal number of RAW streams - virtual camera doesn't support RAW streams.
-  static const int32_t kMaxNumberOfRawStreams = 0;
+  static constexpr int32_t kMaxNumberOfRawStreams = 0;
 
   // Maximal number of non-jpeg streams configured concurrently in single
   // session. This should be at least 3 and can be increased at the potential
   // cost of more CPU/GPU load if there are many concurrent streams.
-  static const int32_t kMaxNumberOfProcessedStreams = 3;
+  static constexpr int32_t kMaxNumberOfProcessedStreams = 3;
 
   // Maximal number of stalling (in case of virtual camera only jpeg for now)
   // streams. Can be increased at the potential cost of more GPU/CPU
   // load.
-  static const int32_t kMaxNumberOfStallStreams = 1;
+  static constexpr int32_t kMaxNumberOfStallStreams = 1;
 
   // Focal length for full frame sensor.
-  constexpr static const float kFocalLength = 43.0;
+  static constexpr float kFocalLength = 43.0;
+
+  // Default JPEG compression quality.
+  static constexpr uint8_t kDefaultJpegQuality = 80;
 
  private:
   std::shared_ptr<VirtualCameraDevice> sharedFromThis();
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 7bbc6ea..615e449 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -19,16 +19,21 @@
 
 #include <chrono>
 #include <cstdint>
+#include <cstring>
 #include <future>
 #include <memory>
 #include <mutex>
 #include <thread>
+#include <vector>
 
+#include "Exif.h"
 #include "GLES/gl.h"
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraBlob.h"
+#include "aidl/android/hardware/camera/device/CameraBlobId.h"
 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/CaptureResult.h"
 #include "aidl/android/hardware/camera/device/ErrorCode.h"
@@ -42,7 +47,7 @@
 #include "ui/GraphicBuffer.h"
 #include "util/EglFramebuffer.h"
 #include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 #include "util/TestPatternHelper.h"
 #include "util/Util.h"
 #include "utils/Errors.h"
@@ -53,6 +58,8 @@
 
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::device::BufferStatus;
+using ::aidl::android::hardware::camera::device::CameraBlob;
+using ::aidl::android::hardware::camera::device::CameraBlobId;
 using ::aidl::android::hardware::camera::device::CameraMetadata;
 using ::aidl::android::hardware::camera::device::CaptureResult;
 using ::aidl::android::hardware::camera::device::ErrorCode;
@@ -65,6 +72,8 @@
 using ::aidl::android::hardware::graphics::common::PixelFormat;
 using ::android::base::ScopedLockAssertion;
 
+using ::android::hardware::camera::common::helper::ExifUtils;
+
 namespace {
 
 using namespace std::chrono_literals;
@@ -76,8 +85,11 @@
 // documented minimum of 2.
 static constexpr uint8_t kPipelineDepth = 2;
 
+static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
+
 CameraMetadata createCaptureResultMetadata(
     const std::chrono::nanoseconds timestamp,
+    const RequestSettings& requestSettings,
     const Resolution reportedSensorSize) {
   std::unique_ptr<CameraMetadata> metadata =
       MetadataBuilder()
@@ -93,6 +105,10 @@
           .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
           .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
           .setFocalLength(VirtualCameraDevice::kFocalLength)
+          .setJpegQuality(requestSettings.jpegQuality)
+          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
+                                requestSettings.thumbnailResolution.height)
+          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
           .setPipelineDepth(kPipelineDepth)
           .setSensorTimestamp(timestamp)
           .build();
@@ -171,6 +187,34 @@
   }
 }
 
+std::vector<uint8_t> createExif(
+    Resolution imageSize, const std::vector<uint8_t>& compressedThumbnail = {}) {
+  std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
+  exifUtils->initialize();
+  exifUtils->setImageWidth(imageSize.width);
+  exifUtils->setImageHeight(imageSize.height);
+  // TODO(b/324383963) Set Make/Model and orientation.
+
+  std::vector<uint8_t> app1Data;
+
+  size_t thumbnailDataSize = compressedThumbnail.size();
+  const void* thumbnailData =
+      thumbnailDataSize > 0
+          ? reinterpret_cast<const void*>(compressedThumbnail.data())
+          : nullptr;
+
+  if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
+    ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
+    return app1Data;
+  }
+
+  const uint8_t* data = exifUtils->getApp1Buffer();
+  const size_t size = exifUtils->getApp1Length();
+
+  app1Data.insert(app1Data.end(), data, data + size);
+  return app1Data;
+}
+
 }  // namespace
 
 CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
@@ -209,8 +253,11 @@
 }
 
 ProcessCaptureRequestTask::ProcessCaptureRequestTask(
-    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
-    : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
+    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+    const RequestSettings& requestSettings)
+    : mFrameNumber(frameNumber),
+      mBuffers(requestBuffers),
+      mRequestSettings(requestSettings) {
 }
 
 int ProcessCaptureRequestTask::getFrameNumber() const {
@@ -222,6 +269,10 @@
   return mBuffers;
 }
 
+const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
+  return mRequestSettings;
+}
+
 void VirtualCameraRenderThread::enqueueTask(
     std::unique_ptr<ProcessCaptureRequestTask> task) {
   std::lock_guard<std::mutex> lock(mLock);
@@ -308,8 +359,8 @@
   captureResult.partialResult = 1;
   captureResult.inputBuffer.streamId = -1;
   captureResult.physicalCameraMetadata.resize(0);
-  captureResult.result =
-      createCaptureResultMetadata(timestamp, mReportedSensorSize);
+  captureResult.result = createCaptureResultMetadata(
+      timestamp, request.getRequestSettings(), mReportedSensorSize);
 
   const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
   captureResult.outputBuffers.resize(buffers.size());
@@ -338,9 +389,9 @@
     }
 
     auto status = streamConfig->format == PixelFormat::BLOB
-                      ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
-                                                   reqBuffer.getBufferId(),
-                                                   reqBuffer.getFence())
+                      ? renderIntoBlobStreamBuffer(
+                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
+                            request.getRequestSettings(), reqBuffer.getFence())
                       : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                     reqBuffer.getBufferId(),
                                                     reqBuffer.getFence());
@@ -420,9 +471,70 @@
   }
 }
 
+std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
+    const Resolution resolution, const int quality) {
+  if (resolution.width == 0 || resolution.height == 0) {
+    ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
+    return {};
+  }
+
+  ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
+        resolution.width, resolution.height, quality);
+  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
+      mEglDisplayContext->getEglDisplay(), resolution.width, resolution.height);
+  if (framebuffer == nullptr) {
+    ALOGE(
+        "Failed to allocate temporary framebuffer for JPEG thumbnail "
+        "compression");
+    return {};
+  }
+
+  // TODO(b/324383963) Add support for letterboxing if the thumbnail size
+  // doesn't correspond to the input texture aspect ratio.
+  if (!renderIntoEglFramebuffer(*framebuffer).isOk()) {
+    ALOGE(
+        "Failed to render input texture into temporary framebuffer for JPEG "
+        "thumbnail");
+    return {};
+  }
+
+  std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
+  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
+
+  if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+    // This should never happen since we're allocating the temporary buffer
+    // with YUV420 layout above.
+    ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
+          gBuffer->getPixelFormat());
+    return {};
+  }
+
+  YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
+  if (yCbCrLock.getStatus() != NO_ERROR) {
+    ALOGE("%s: Failed to lock graphic buffer while generating thumbnail: %d",
+          __func__, yCbCrLock.getStatus());
+    return {};
+  }
+
+  std::vector<uint8_t> compressedThumbnail;
+  compressedThumbnail.resize(kJpegThumbnailBufferSize);
+  ALOGV("%s: Compressing thumbnail %d x %d", __func__, gBuffer->getWidth(),
+        gBuffer->getHeight());
+  std::optional<size_t> compressedSize = compressJpeg(
+      gBuffer->getWidth(), gBuffer->getHeight(), quality, *yCbCrLock, {},
+      compressedThumbnail.size(), compressedThumbnail.data());
+  if (!compressedSize.has_value()) {
+    ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
+    return {};
+  }
+  compressedThumbnail.resize(compressedSize.value());
+  return compressedThumbnail;
+}
+
 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
-    const int streamId, const int bufferId, sp<Fence> fence) {
-  ALOGV("%s", __func__);
+    const int streamId, const int bufferId,
+    const RequestSettings& requestSettings, sp<Fence> fence) {
   std::shared_ptr<AHardwareBuffer> hwBuffer =
       mSessionContext.fetchHardwareBuffer(streamId, bufferId);
   if (hwBuffer == nullptr) {
@@ -437,6 +549,9 @@
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
+  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
+        stream->width, stream->height, requestSettings.jpegQuality);
+
   // Let's create YUV framebuffer and render the surface into this.
   // This will take care about rescaling as well as potential format conversion.
   std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
@@ -453,58 +568,63 @@
     return status;
   }
 
-  AHardwareBuffer_Planes planes_info;
-
-  int32_t rawFence = fence != nullptr ? fence->get() : -1;
-  int result = AHardwareBuffer_lockPlanes(hwBuffer.get(),
-                                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
-                                          rawFence, nullptr, &planes_info);
-  if (result != OK) {
-    ALOGE("%s: Failed to lock planes for BLOB buffer: %d", __func__, result);
+  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
+                             fence);
+  if (planesLock.getStatus() != OK) {
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
   std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
   GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
 
-  bool compressionSuccess = true;
+  std::optional<size_t> compressedSize;
   if (gBuffer != nullptr) {
-    android_ycbcr ycbcr;
     if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
       // This should never happen since we're allocating the temporary buffer
       // with YUV420 layout above.
       ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
             gBuffer->getPixelFormat());
-      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
       return cameraStatus(Status::INTERNAL_ERROR);
     }
 
-    status_t status =
-        gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
-    ALOGV("Locked buffers");
-    if (status != NO_ERROR) {
-      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
-      ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
+    YCbCrLockGuard yCbCrLock(inHwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
+    if (yCbCrLock.getStatus() != OK) {
       return cameraStatus(Status::INTERNAL_ERROR);
     }
 
-    compressionSuccess =
-        compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
-                     stream->bufferSize, planes_info.planes[0].data);
-
-    status_t res = gBuffer->unlock();
-    if (res != NO_ERROR) {
-      ALOGE("Failed to unlock graphic buffer: %d", res);
-    }
+    std::vector<uint8_t> app1ExifData =
+        createExif(Resolution(stream->width, stream->height),
+                   createThumbnail(requestSettings.thumbnailResolution,
+                                   requestSettings.thumbnailJpegQuality));
+    compressedSize = compressJpeg(
+        gBuffer->getWidth(), gBuffer->getHeight(), requestSettings.jpegQuality,
+        *yCbCrLock, app1ExifData, stream->bufferSize - sizeof(CameraBlob),
+        (*planesLock).planes[0].data);
   } else {
-    compressionSuccess =
-        compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
-                          planes_info.planes[0].data);
+    std::vector<uint8_t> app1ExifData =
+        createExif(Resolution(stream->width, stream->height));
+    compressedSize = compressBlackJpeg(
+        stream->width, stream->height, requestSettings.jpegQuality, app1ExifData,
+        stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);
   }
-  AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
-  ALOGV("Unlocked buffers");
-  return compressionSuccess ? ndk::ScopedAStatus::ok()
-                            : cameraStatus(Status::INTERNAL_ERROR);
+
+  if (!compressedSize.has_value()) {
+    ALOGE("%s: Failed to compress JPEG image", __func__);
+    return cameraStatus(Status::INTERNAL_ERROR);
+  }
+
+  CameraBlob cameraBlob{
+      .blobId = CameraBlobId::JPEG,
+      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};
+
+  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
+             (stream->bufferSize - sizeof(cameraBlob)),
+         &cameraBlob, sizeof(cameraBlob));
+
+  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
+        __func__, compressedSize.value());
+
+  return ndk::ScopedAStatus::ok();
 }
 
 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
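
For BLOB streams, the compressed JPEG is written at the start of the output buffer and a CameraBlob transport header is written into the last sizeof(CameraBlob) bytes of the allocation; that is what the memcpy above does, and why the compressor is given stream->bufferSize - sizeof(CameraBlob) as its budget. A minimal sketch of that trailer layout follows; writeJpegBlobTrailer is an illustrative helper, not part of this change.

    // Sketch of the BLOB buffer layout produced above: JPEG bytes at offset 0,
    // CameraBlob descriptor in the final sizeof(CameraBlob) bytes of the buffer.
    #include <cstdint>
    #include <cstring>
    #include "aidl/android/hardware/camera/device/CameraBlob.h"
    #include "aidl/android/hardware/camera/device/CameraBlobId.h"

    using aidl::android::hardware::camera::device::CameraBlob;
    using aidl::android::hardware::camera::device::CameraBlobId;

    void writeJpegBlobTrailer(uint8_t* buffer, size_t bufferSize, size_t jpegSize) {
        CameraBlob blob{.blobId = CameraBlobId::JPEG,
                        .blobSizeBytes = static_cast<int32_t>(jpegSize)};
        std::memcpy(buffer + bufferSize - sizeof(blob), &blob, sizeof(blob));
    }
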
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index c8f61f4..86dad0b 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -17,19 +17,23 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
 
+#include <cstdint>
 #include <deque>
 #include <future>
 #include <memory>
 #include <thread>
+#include <vector>
 
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraSessionContext.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
 #include "android/binder_auto_utils.h"
 #include "util/EglDisplayContext.h"
 #include "util/EglFramebuffer.h"
 #include "util/EglProgram.h"
 #include "util/EglSurfaceTexture.h"
+#include "util/Util.h"
 
 namespace android {
 namespace companion {
@@ -50,11 +54,18 @@
   const sp<Fence> mFence;
 };
 
+struct RequestSettings {
+  int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+  Resolution thumbnailResolution = Resolution(0, 0);
+  int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+};
+
 // Represents single capture request to fill set of buffers.
 class ProcessCaptureRequestTask {
  public:
   ProcessCaptureRequestTask(
-      int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers);
+      int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+      const RequestSettings& requestSettings = {});
 
   // Returns frame number corresponding to the request.
   int getFrameNumber() const;
@@ -66,9 +77,12 @@
   // so it cannot be accessed outside of its lifetime.
   const std::vector<CaptureRequestBuffer>& getBuffers() const;
 
+  const RequestSettings& getRequestSettings() const;
+
  private:
   const int mFrameNumber;
   const std::vector<CaptureRequestBuffer> mBuffers;
+  const RequestSettings mRequestSettings;
 };
 
 // Wraps dedicated rendering thread and rendering business with corresponding
@@ -123,13 +137,19 @@
   // TODO(b/301023410) - Refactor the actual rendering logic off this class for
   // easier testability.
 
+  // Creates a thumbnail of the specified size for the current image.
+  // The compressed thumbnail size is limited to 32 KiB.
+  // Returns a vector with the compressed thumbnail on success,
+  // or an empty vector otherwise.
+  std::vector<uint8_t> createThumbnail(Resolution resolution, int quality);
+
   // Render current image to the BLOB buffer.
   // If fence is specified, this function will block until the fence is cleared
   // before writing to the buffer.
   // Always called on render thread.
-  ndk::ScopedAStatus renderIntoBlobStreamBuffer(const int streamId,
-                                                const int bufferId,
-                                                sp<Fence> fence = nullptr);
+  ndk::ScopedAStatus renderIntoBlobStreamBuffer(
+      const int streamId, const int bufferId,
+      const RequestSettings& requestSettings, sp<Fence> fence = nullptr);
 
   // Render current image to the YCbCr buffer.
   // If fence is specified, this function will block until the fence is cleared
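
A short usage sketch of the widened constructor, assuming a render thread and a prepared vector of CaptureRequestBuffer (variable names and values are illustrative only):

    RequestSettings settings{.jpegQuality = 90,
                             .thumbnailResolution = Resolution(240, 180),
                             .thumbnailJpegQuality = 70};
    renderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
        /*frameNumber=*/42, taskBuffers, settings));
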
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index d1ec763..dfa71f3 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -42,6 +42,7 @@
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/BufferCache.h"
 #include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/CaptureRequest.h"
 #include "aidl/android/hardware/camera/device/HalStream.h"
 #include "aidl/android/hardware/camera/device/NotifyMsg.h"
@@ -61,7 +62,7 @@
 #include "util/EglFramebuffer.h"
 #include "util/EglProgram.h"
 #include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 #include "util/TestPatternHelper.h"
 #include "util/Util.h"
 
@@ -100,10 +101,16 @@
 
 // Size of request/result metadata fast message queue.
 // Setting this to 0 always disables FMQ.
-static constexpr size_t kMetadataMsgQueueSize = 0;
+constexpr size_t kMetadataMsgQueueSize = 0;
 
 // Maximum number of buffers to use per single stream.
-static constexpr size_t kMaxStreamBuffers = 2;
+constexpr size_t kMaxStreamBuffers = 2;
+
+constexpr int32_t kDefaultJpegQuality = 80;
+constexpr int32_t kDefaultJpegThumbnailQuality = 70;
+
+// Thumbnail size (0,0) corresponds to disabling the thumbnail.
+const Resolution kDefaultJpegThumbnailSize(0, 0);
 
 camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
     const RequestTemplate type) {
@@ -150,6 +157,9 @@
           .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
           .setFlashMode(ANDROID_FLASH_MODE_OFF)
           .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+          .setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
+          .setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
+          .setJpegThumbnailSize(0, 0)
           .build();
   if (metadata == nullptr) {
     ALOGE("%s: Failed to construct metadata for default request type %s",
@@ -191,6 +201,16 @@
                             }));
 }
 
+RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
+  return RequestSettings{
+      .jpegQuality = getJpegQuality(metadata).value_or(
+          VirtualCameraDevice::kDefaultJpegQuality),
+      .thumbnailResolution =
+          getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
+      .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
+          VirtualCameraDevice::kDefaultJpegQuality)};
+}
+
 }  // namespace
 
 VirtualCameraSession::VirtualCameraSession(
@@ -298,7 +318,6 @@
         inputWidth, inputHeight, Format::YUV_420_888);
   }
 
-  mFirstRequest.store(true);
   return ndk::ScopedAStatus::ok();
 }
 
@@ -440,13 +459,25 @@
     const CaptureRequest& request) {
   ALOGD("%s: request: %s", __func__, request.toString().c_str());
 
-  if (mFirstRequest.exchange(false) && request.settings.metadata.empty()) {
-    return cameraStatus(Status::ILLEGAL_ARGUMENT);
-  }
-
   std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
+  RequestSettings requestSettings;
   {
     std::lock_guard<std::mutex> lock(mLock);
+
+    // If the metadata is empty, the last received metadata still applies;
+    // if it's non-empty, update the cached metadata.
+    if (!request.settings.metadata.empty()) {
+      mCurrentRequestMetadata = request.settings;
+    }
+
+    // We don't have any metadata for this request - this means none was
+    // received in the first request, which is an error state.
+    if (mCurrentRequestMetadata.metadata.empty()) {
+      return cameraStatus(Status::ILLEGAL_ARGUMENT);
+    }
+
+    requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);
+
     cameraCallback = mCameraDeviceCallback;
   }
 
@@ -480,7 +511,7 @@
       return cameraStatus(Status::INTERNAL_ERROR);
     }
     mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
-        request.frameNumber, taskBuffers));
+        request.frameNumber, taskBuffers, requestSettings));
   }
 
   if (mVirtualCameraClientCallback != nullptr) {
diff --git a/services/camera/virtualcamera/VirtualCameraSession.h b/services/camera/virtualcamera/VirtualCameraSession.h
index 82a7a34..556314f 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.h
+++ b/services/camera/virtualcamera/VirtualCameraSession.h
@@ -25,6 +25,7 @@
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/companion/virtualcamera/IVirtualCameraCallback.h"
 #include "aidl/android/hardware/camera/device/BnCameraDeviceSession.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
 #include "utils/Mutex.h"
 
@@ -138,7 +139,8 @@
       int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
   std::shared_ptr<ResultMetadataQueue> mResultMetadataQueue;
 
-  std::atomic_bool mFirstRequest{true};
+  aidl::android::hardware::camera::device::CameraMetadata mCurrentRequestMetadata
+      GUARDED_BY(mLock);
 
   std::unique_ptr<VirtualCameraRenderThread> mRenderThread GUARDED_BY(mLock);
 };
diff --git a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
index 35bf752..9146d8a 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
@@ -30,6 +30,8 @@
 #include "gtest/gtest.h"
 #include "log/log_main.h"
 #include "system/camera_metadata.h"
+#include "util/MetadataUtil.h"
+#include "util/Util.h"
 #include "utils/Errors.h"
 
 namespace android {
@@ -47,6 +49,7 @@
 using ::aidl::android::hardware::camera::device::StreamConfiguration;
 using ::aidl::android::hardware::camera::device::StreamType;
 using ::aidl::android::hardware::graphics::common::PixelFormat;
+using ::testing::ElementsAre;
 using ::testing::UnorderedElementsAreArray;
 using metadata_stream_t =
     camera_metadata_enum_android_scaler_available_stream_configurations_t;
@@ -306,6 +309,16 @@
   EXPECT_FALSE(aidl_ret);
 }
 
+TEST_F(VirtualCameraDeviceTest, thumbnailSizeWithCompatibleAspectRatio) {
+  CameraMetadata metadata;
+  ASSERT_TRUE(mCamera->getCameraCharacteristics(&metadata).isOk());
+
+  // The camera is configured with VGA input, so we expect a 240 x 180 thumbnail
+  // size in the characteristics, since it has the same aspect ratio.
+  EXPECT_THAT(getJpegAvailableThumbnailSizes(metadata),
+              ElementsAre(Resolution(0, 0), Resolution(240, 180)));
+}
+
 }  // namespace
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
index 446c679..1af8b80 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
@@ -30,7 +30,7 @@
 #include "android/binder_interface_utils.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 
 namespace android {
 namespace companion {
diff --git a/services/camera/virtualcamera/util/JpegUtil.cc b/services/camera/virtualcamera/util/JpegUtil.cc
index 2b19c13..98f2448 100644
--- a/services/camera/virtualcamera/util/JpegUtil.cc
+++ b/services/camera/virtualcamera/util/JpegUtil.cc
@@ -19,7 +19,7 @@
 
 #include <cstddef>
 #include <cstdint>
-#include <memory>
+#include <optional>
 #include <vector>
 
 #include "android/hardware_buffer.h"
@@ -34,11 +34,9 @@
 namespace virtualcamera {
 namespace {
 
-constexpr int kJpegQuality = 80;
-
 class LibJpegContext {
  public:
-  LibJpegContext(int width, int height, const size_t outBufferSize,
+  LibJpegContext(int width, int height, int quality, const size_t outBufferSize,
                  void* outBuffer)
       : mWidth(width),
         mHeight(height),
@@ -76,7 +74,7 @@
     jpeg_set_defaults(&mCompressStruct);
 
     // Set quality and colorspace.
-    jpeg_set_quality(&mCompressStruct, kJpegQuality, 1);
+    jpeg_set_quality(&mCompressStruct, quality, 1);
     jpeg_set_colorspace(&mCompressStruct, JCS_YCbCr);
 
     // Configure RAW input mode - this lets libjpeg know we're providing raw,
@@ -94,11 +92,31 @@
     mCompressStruct.comp_info[2].v_samp_factor = 1;
   }
 
-  bool compress(const android_ycbcr& ycbr) {
+  LibJpegContext& setApp1Data(const uint8_t* app1Data, const size_t size) {
+    mApp1Data = app1Data;
+    mApp1DataSize = size;
+    return *this;
+  }
+
+  std::optional<size_t> compress(const android_ycbcr& ycbr) {
+    // TODO(b/301023410) - Add support for compressing image sizes not aligned
+    // with DCT size.
+    if (mWidth % (2 * DCTSIZE) || (mHeight % (2 * DCTSIZE))) {
+      ALOGE(
+          "%s: Compressing YUV420 image with size %dx%d not aligned with 2 * "
+          "DCTSIZE (%d) is not currently supported.",
+          __func__, mWidth, mHeight, 2 * DCTSIZE);
+      return std::nullopt;
+    }
+
+    // Chroma planes have 1/2 resolution of the original image.
+    const int cHeight = mHeight / 2;
+    const int cWidth = mWidth / 2;
+
     // Prepare arrays of pointers to scanlines of each plane.
     std::vector<JSAMPROW> yLines(mHeight);
-    std::vector<JSAMPROW> cbLines(mHeight / 2);
-    std::vector<JSAMPROW> crLines(mHeight / 2);
+    std::vector<JSAMPROW> cbLines(cHeight);
+    std::vector<JSAMPROW> crLines(cHeight);
 
     uint8_t* y = static_cast<uint8_t*>(ycbr.y);
     uint8_t* cb = static_cast<uint8_t*>(ycbr.cb);
@@ -107,23 +125,27 @@
     // Since UV samples might be interleaved (semiplanar) we need to copy
     // them to separate planes, since libjpeg doesn't directly
     // support processing semiplanar YUV.
-    const int c_samples = (mWidth / 2) * (mHeight / 2);
-    std::vector<uint8_t> cb_plane(c_samples);
-    std::vector<uint8_t> cr_plane(c_samples);
+    const int cSamples = cWidth * cHeight;
+    std::vector<uint8_t> cb_plane(cSamples);
+    std::vector<uint8_t> cr_plane(cSamples);
 
     // TODO(b/301023410) - Use libyuv or ARM SIMD for "unzipping" the data.
-    for (int i = 0; i < c_samples; ++i) {
-      cb_plane[i] = *cb;
-      cr_plane[i] = *cr;
-      cb += ycbr.chroma_step;
-      cr += ycbr.chroma_step;
+    int out_idx = 0;
+    for (int i = 0; i < cHeight; ++i) {
+      for (int j = 0; j < cWidth; ++j) {
+        cb_plane[out_idx] = cb[j * ycbr.chroma_step];
+        cr_plane[out_idx] = cr[j * ycbr.chroma_step];
+        out_idx++;
+      }
+      cb += ycbr.cstride;
+      cr += ycbr.cstride;
     }
 
     // Collect pointers to individual scanline of each plane.
     for (int i = 0; i < mHeight; ++i) {
       yLines[i] = y + i * ycbr.ystride;
     }
-    for (int i = 0; i < (mHeight / 2); ++i) {
+    for (int i = 0; i < cHeight; ++i) {
       cbLines[i] = cb_plane.data() + i * (mWidth / 2);
       crLines[i] = cr_plane.data() + i * (mWidth / 2);
     }
@@ -131,7 +153,7 @@
     return compress(yLines, cbLines, crLines);
   }
 
-  bool compressBlackImage() {
+  std::optional<size_t> compressBlackImage() {
     // We only really need to prepare one scanline for Y and one shared scanline
     // for Cb & Cr.
     std::vector<uint8_t> yLine(mWidth, 0);
@@ -165,11 +187,18 @@
   // Takes vector of pointers to Y / Cb / Cr scanlines as an input. Length of
   // each vector needs to correspond to height of corresponding plane.
   //
-  // Returns true if compression is successful, false otherwise.
-  bool compress(std::vector<JSAMPROW>& yLines, std::vector<JSAMPROW>& cbLines,
-                std::vector<JSAMPROW>& crLines) {
+  // Returns the size of the compressed image in bytes on success, or an empty
+  // optional otherwise.
+  std::optional<size_t> compress(std::vector<JSAMPROW>& yLines,
+                                 std::vector<JSAMPROW>& cbLines,
+                                 std::vector<JSAMPROW>& crLines) {
     jpeg_start_compress(&mCompressStruct, TRUE);
 
+    if (mApp1Data != nullptr && mApp1DataSize > 0) {
+      ALOGV("%s: Writing exif, size %zu B", __func__, mApp1DataSize);
+      jpeg_write_marker(&mCompressStruct, JPEG_APP0 + 1,
+                        static_cast<const JOCTET*>(mApp1Data), mApp1DataSize);
+    }
+
     while (mCompressStruct.next_scanline < mCompressStruct.image_height) {
       const uint32_t batchSize = DCTSIZE * 2;
       const uint32_t nl = mCompressStruct.next_scanline;
@@ -181,11 +210,11 @@
         ALOGE("%s: compressed %u lines, expected %u (total %u/%u)",
               __FUNCTION__, done, batchSize, mCompressStruct.next_scanline,
               mCompressStruct.image_height);
-        return false;
+        return std::nullopt;
       }
     }
     jpeg_finish_compress(&mCompressStruct);
-    return mSuccess;
+    return mEncodedSize;
   }
 
   // === libjpeg callbacks below ===
@@ -217,6 +246,10 @@
   jpeg_error_mgr mErrorMgr;
   jpeg_destination_mgr mDestinationMgr;
 
+  // APP1 data.
+  const uint8_t* mApp1Data = nullptr;
+  size_t mApp1DataSize = 0;
+
   // Dimensions of the input image.
   int mWidth;
   int mHeight;
@@ -235,15 +268,26 @@
 
 }  // namespace
 
-bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
-                  size_t outBufferSize, void* outBuffer) {
-  return LibJpegContext(width, height, outBufferSize, outBuffer).compress(ycbcr);
+std::optional<size_t> compressJpeg(const int width, const int height,
+                                   const int quality, const android_ycbcr& ycbcr,
+                                   const std::vector<uint8_t>& app1ExifData,
+                                   size_t outBufferSize, void* outBuffer) {
+  LibJpegContext context(width, height, quality, outBufferSize, outBuffer);
+  if (!app1ExifData.empty()) {
+    context.setApp1Data(app1ExifData.data(), app1ExifData.size());
+  }
+  return context.compress(ycbcr);
 }
 
-bool compressBlackJpeg(int width, int height, size_t outBufferSize,
-                       void* outBuffer) {
-  return LibJpegContext(width, height, outBufferSize, outBuffer)
-      .compressBlackImage();
+std::optional<size_t> compressBlackJpeg(const int width, const int height,
+                                        const int quality,
+                                        const std::vector<uint8_t>& app1ExifData,
+                                        size_t outBufferSize, void* outBuffer) {
+  LibJpegContext context(width, height, quality, outBufferSize, outBuffer);
+  if (!app1ExifData.empty()) {
+    context.setApp1Data(app1ExifData.data(), app1ExifData.size());
+  }
+  return context.compressBlackImage();
 }
 
 }  // namespace virtualcamera
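
The chroma copy loop above "unzips" semiplanar CbCr data into the separate planes libjpeg expects; the same deinterleave written as a standalone helper for clarity (a sketch only, the helper name and parameters are not part of this change):

    // Sketch: deinterleave semiplanar chroma into planar Cb / Cr buffers.
    // chromaStep is the distance between consecutive samples of the same plane
    // (2 for NV12/NV21); cStride is the source row stride in bytes.
    void deinterleaveChroma(const uint8_t* cbSrc, const uint8_t* crSrc,
                            int cWidth, int cHeight, int chromaStep, int cStride,
                            uint8_t* cbDst, uint8_t* crDst) {
      for (int row = 0; row < cHeight; ++row) {
        for (int col = 0; col < cWidth; ++col) {
          cbDst[row * cWidth + col] = cbSrc[col * chromaStep];
          crDst[row * cWidth + col] = crSrc[col * chromaStep];
        }
        cbSrc += cStride;
        crSrc += cStride;
      }
    }
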
diff --git a/services/camera/virtualcamera/util/JpegUtil.h b/services/camera/virtualcamera/util/JpegUtil.h
index c44d0a8..e64fb4f 100644
--- a/services/camera/virtualcamera/util/JpegUtil.h
+++ b/services/camera/virtualcamera/util/JpegUtil.h
@@ -18,6 +18,7 @@
 #define ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
 
 #include <memory>
+#include <optional>
 
 #include "android/hardware_buffer.h"
 #include "system/graphics.h"
@@ -27,14 +28,34 @@
 namespace virtualcamera {
 
 // Jpeg-compress image into the output buffer.
-// Returns true if the compression was successful, false otherwise.
-bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
-                  size_t outBufferSize, void* outBuffer);
+// * width - width of the image
+// * height - height of the image
+// * quality - 0-100, higher number corresponds to higher quality.
+// * ycbcr - android_ycbcr structure describing the layout of the input
+//   YUV420 image.
+// * app1ExifData - vector containing data to be included in APP1
+//   segment. Can be empty.
+// * outBufferSize - capacity of the output buffer.
+// * outBuffer - output buffer to write compressed data into.
+// Returns size of compressed data if the compression was successful,
+// empty optional otherwise.
+std::optional<size_t> compressJpeg(int width, int height, int quality,
+                                   const android_ycbcr& ycbcr,
+                                   const std::vector<uint8_t>& app1ExifData,
+                                   size_t outBufferSize, void* outBuffer);
 
 // Jpeg-compress all-black image into the output buffer.
-// Returns true if the compression was successful, false otherwise.
-bool compressBlackJpeg(int width, int height, size_t outBufferSize,
-                       void* outBuffer);
+// * width - width of the image
+// * height - height of the image
+// * quality - 0-100, higher number corresponds to higher quality.
+// * app1ExifData - vector containing data to be included in APP1
+//   segment. Can be empty.
+// * outBufferSize - capacity of the output buffer.
+// * outBuffer - output buffer to write compressed data into.
+// Returns size of compressed data if the compression was successful,
+// empty optional otherwise.
+std::optional<size_t> compressBlackJpeg(int width, int height, int quality,
+                                        const std::vector<uint8_t>& app1ExifData,
+                                        size_t outBufferSize, void* outBuffer);
 
 }  // namespace virtualcamera
 }  // namespace companion
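
A hedged call-site sketch for the new compressJpeg signature, assuming a locked android_ycbcr description and an output buffer are already available:

    std::vector<uint8_t> exifData;  // APP1 payload, may be empty.
    std::optional<size_t> jpegSize =
        compressJpeg(/*width=*/640, /*height=*/480, /*quality=*/80, ycbcr,
                     exifData, outBufferSize, outBuffer);
    if (!jpegSize.has_value()) {
      // Compression failed; the buffer contents are not usable.
    }
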
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.cc b/services/camera/virtualcamera/util/MetadataUtil.cc
similarity index 88%
rename from services/camera/virtualcamera/util/MetadataBuilder.cc
rename to services/camera/virtualcamera/util/MetadataUtil.cc
index 2bbd58c..d223f17 100644
--- a/services/camera/virtualcamera/util/MetadataBuilder.cc
+++ b/services/camera/virtualcamera/util/MetadataUtil.cc
@@ -15,9 +15,9 @@
  */
 
 // #define LOG_NDEBUG 0
-#define LOG_TAG "MetadataBuilder"
+#define LOG_TAG "MetadataUtil"
 
-#include "MetadataBuilder.h"
+#include "MetadataUtil.h"
 
 #include <algorithm>
 #include <cstdint>
@@ -396,6 +396,22 @@
   return *this;
 }
 
+MetadataBuilder& MetadataBuilder::setJpegQuality(const uint8_t quality) {
+  mEntryMap[ANDROID_JPEG_QUALITY] = asVectorOf<uint8_t>(quality);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegThumbnailSize(const int width,
+                                                       const int height) {
+  mEntryMap[ANDROID_JPEG_THUMBNAIL_SIZE] = std::vector<int32_t>({width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegThumbnailQuality(const uint8_t quality) {
+  mEntryMap[ANDROID_JPEG_THUMBNAIL_QUALITY] = asVectorOf<uint8_t>(quality);
+  return *this;
+}
+
 MetadataBuilder& MetadataBuilder::setMaxNumberOutputStreams(
     const int32_t maxRawStreams, const int32_t maxProcessedStreams,
     const int32_t maxStallStreams) {
@@ -597,6 +613,67 @@
   return aidlMetadata;
 }
 
+std::optional<int32_t> getJpegQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_QUALITY, &entry) !=
+      OK) {
+    return std::nullopt;
+  }
+
+  return *entry.data.i32;
+}
+
+std::optional<Resolution> getJpegThumbnailSize(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_THUMBNAIL_SIZE,
+                                    &entry) != OK) {
+    return std::nullopt;
+  }
+
+  return Resolution(entry.data.i32[0], entry.data.i32[1]);
+}
+
+std::optional<int32_t> getJpegThumbnailQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_THUMBNAIL_QUALITY,
+                                    &entry) != OK) {
+    return std::nullopt;
+  }
+
+  return *entry.data.i32;
+}
+
+std::vector<Resolution> getJpegAvailableThumbnailSizes(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(
+          metadata, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, &entry) != OK) {
+    return {};
+  }
+
+  std::vector<Resolution> thumbnailSizes;
+  thumbnailSizes.reserve(entry.count / 2);
+  for (size_t i = 0; i < entry.count; i += 2) {
+    thumbnailSizes.emplace_back(entry.data.i32[i], entry.data.i32[i + 1]);
+  }
+  return thumbnailSizes;
+}
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.h b/services/camera/virtualcamera/util/MetadataUtil.h
similarity index 90%
rename from services/camera/virtualcamera/util/MetadataBuilder.h
rename to services/camera/virtualcamera/util/MetadataUtil.h
index df99089..9ddfd81 100644
--- a/services/camera/virtualcamera/util/MetadataBuilder.h
+++ b/services/camera/virtualcamera/util/MetadataUtil.h
@@ -14,8 +14,8 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
-#define ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
 
 #include <chrono>
 #include <cstdint>
@@ -98,7 +98,8 @@
           sensorReadoutTimestamp);
 
   // See ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableFocalLengths(const std::vector<float>& focalLengths);
+  MetadataBuilder& setAvailableFocalLengths(
+      const std::vector<float>& focalLengths);
 
   // See ANDROID_LENS_FOCAL_LENGTH in CameraMetadataTag.aidl.
   MetadataBuilder& setFocalLength(float focalLength);
@@ -278,6 +279,15 @@
   MetadataBuilder& setJpegAvailableThumbnailSizes(
       const std::vector<Resolution>& thumbnailSizes);
 
+  // See JPEG_QUALITY in CaptureRequest.java.
+  MetadataBuilder& setJpegQuality(uint8_t quality);
+
+  // See JPEG_THUMBNAIL_SIZE in CaptureRequest.java.
+  MetadataBuilder& setJpegThumbnailSize(int width, int height);
+
+  // See JPEG_THUMBNAIL_QUALITY in CaptureRequest.java.
+  MetadataBuilder& setJpegThumbnailQuality(uint8_t quality);
+
   // The maximum numbers of different types of output streams
   // that can be configured and used simultaneously by a camera device.
   //
@@ -353,8 +363,25 @@
   bool mExtendWithAvailableCharacteristicsKeys = false;
 };
 
+// Returns JPEG_QUALITY from metadata, or nullopt if the key is not present.
+std::optional<int32_t> getJpegQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_THUMBNAIL_SIZE from metadata, or nullopt if the key is not present.
+std::optional<Resolution> getJpegThumbnailSize(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_THUMBNAIL_QUALITY from metadata, or nullopt if the key is not present.
+std::optional<int32_t> getJpegThumbnailQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_AVAILABLE_THUMBNAIL_SIZES from metadata, or an empty vector if
+// the key is not present.
+std::vector<Resolution> getJpegAvailableThumbnailSizes(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
 
-#endif  // ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
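
A minimal round-trip sketch pairing the new builder setters with the getters declared above; it assumes build() returns a nullable pointer to CameraMetadata, as the nullptr checks in VirtualCameraSession.cc suggest:

    auto metadata = MetadataBuilder()
                        .setJpegQuality(85)
                        .setJpegThumbnailSize(240, 180)
                        .setJpegThumbnailQuality(70)
                        .build();
    if (metadata != nullptr) {
      std::optional<int32_t> quality = getJpegQuality(*metadata);            // 85
      std::optional<Resolution> size = getJpegThumbnailSize(*metadata);      // 240x180
    }
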
diff --git a/services/camera/virtualcamera/util/Util.cc b/services/camera/virtualcamera/util/Util.cc
index 2d0545d..b2048bc 100644
--- a/services/camera/virtualcamera/util/Util.cc
+++ b/services/camera/virtualcamera/util/Util.cc
@@ -20,8 +20,13 @@
 
 #include <algorithm>
 #include <array>
+#include <cstdint>
+#include <memory>
 
+#include "android/hardware_buffer.h"
 #include "jpeglib.h"
+#include "ui/GraphicBuffer.h"
+#include "utils/Errors.h"
 
 namespace android {
 namespace companion {
@@ -40,6 +45,82 @@
 constexpr std::array<Format, 2> kSupportedFormats{Format::YUV_420_888,
                                                   Format::RGBA_8888};
 
+YCbCrLockGuard::YCbCrLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                               const uint32_t usageFlags)
+    : mHwBuffer(hwBuffer) {
+  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(mHwBuffer.get());
+  if (gBuffer == nullptr) {
+    ALOGE("%s: Attempting to lock nullptr buffer.", __func__);
+    return;
+  }
+  mLockStatus = gBuffer->lockYCbCr(usageFlags, &mYCbCr);
+  if (mLockStatus != OK) {
+    ALOGE("%s: Failed to lock graphic buffer: %s", __func__,
+          statusToString(mLockStatus).c_str());
+  }
+}
+
+YCbCrLockGuard::~YCbCrLockGuard() {
+  if (getStatus() != OK) {
+    return;
+  }
+
+  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(mHwBuffer.get());
+  if (gBuffer == nullptr) {
+    return;
+  }
+  status_t status = gBuffer->unlock();
+  if (status != NO_ERROR) {
+    ALOGE("Failed to unlock graphic buffer: %s", statusToString(status).c_str());
+  }
+}
+
+status_t YCbCrLockGuard::getStatus() const {
+  return mLockStatus;
+}
+
+const android_ycbcr& YCbCrLockGuard::operator*() const {
+  LOG_ALWAYS_FATAL_IF(getStatus() != OK,
+                      "Dereferencing unlocked YCbCrLockGuard, status is %s",
+                      statusToString(mLockStatus).c_str());
+  return mYCbCr;
+}
+
+PlanesLockGuard::PlanesLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                                 const uint64_t usageFlags, sp<Fence> fence)
+    : mHwBuffer(hwBuffer) {
+  if (hwBuffer == nullptr) {
+    ALOGE("%s: Attempting to lock nullptr buffer.", __func__);
+    return;
+  }
+
+  const int32_t rawFence = fence != nullptr ? fence->get() : -1;
+  mLockStatus = static_cast<status_t>(AHardwareBuffer_lockPlanes(
+      hwBuffer.get(), usageFlags, rawFence, nullptr, &mPlanes));
+  if (mLockStatus != OK) {
+    ALOGE("%s: Failed to lock graphic buffer: %s", __func__,
+          statusToString(mLockStatus).c_str());
+  }
+}
+
+PlanesLockGuard::~PlanesLockGuard() {
+  if (getStatus() != OK || mHwBuffer == nullptr) {
+    return;
+  }
+  AHardwareBuffer_unlock(mHwBuffer.get(), /*fence=*/nullptr);
+}
+
+status_t PlanesLockGuard::getStatus() const {
+  return mLockStatus;
+}
+
+const AHardwareBuffer_Planes& PlanesLockGuard::operator*() const {
+  LOG_ALWAYS_FATAL_IF(getStatus() != OK,
+                      "Dereferencing unlocked PlanesLockGuard, status is %s",
+                      statusToString(mLockStatus).c_str());
+  return mPlanes;
+}
+
 sp<Fence> importFence(const NativeHandle& aidlHandle) {
   if (aidlHandle.fds.size() != 1) {
     return sp<Fence>::make();
@@ -79,6 +160,10 @@
   return true;
 }
 
+std::ostream& operator<<(std::ostream& os, const Resolution& resolution) {
+  return os << resolution.width << "x" << resolution.height;
+}
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
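
A hedged sketch of guarding a BLOB output buffer with PlanesLockGuard, mirroring how the render thread consumes the locked plane (hwBuffer and fence are assumed to come from the stream buffer import):

    PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
                               fence);
    if (planesLock.getStatus() != OK) {
      return cameraStatus(Status::INTERNAL_ERROR);
    }
    void* blobData = (*planesLock).planes[0].data;  // Valid while the guard lives.
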
diff --git a/services/camera/virtualcamera/util/Util.h b/services/camera/virtualcamera/util/Util.h
index 9f81bb1..d5b0b1f 100644
--- a/services/camera/virtualcamera/util/Util.h
+++ b/services/camera/virtualcamera/util/Util.h
@@ -18,17 +18,80 @@
 #define ANDROID_COMPANION_VIRTUALCAMERA_UTIL_H
 
 #include <cstdint>
+#include <memory>
 
 #include "aidl/android/companion/virtualcamera/Format.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/StreamBuffer.h"
 #include "android/binder_auto_utils.h"
+#include "android/hardware_buffer.h"
+#include "system/graphics.h"
 #include "ui/Fence.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
 
+// RAII utility class to safely lock AHardwareBuffer and obtain android_ycbcr
+// structure describing YUV plane layout.
+//
+// Access to the buffer is locked immediately after construction.
+class YCbCrLockGuard {
+ public:
+  YCbCrLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer, uint32_t usageFlags);
+  YCbCrLockGuard(YCbCrLockGuard&& other) = default;
+  ~YCbCrLockGuard();
+
+  // Returns OK if the buffer is successfully locked.
+  status_t getStatus() const;
+
+  // Dereferencing instance of this guard returns android_ycbcr structure
+  // describing the layout.
+  // Caller needs to check whether the buffer was successfully locked
+  // before dereferencing.
+  const android_ycbcr& operator*() const;
+
+  // Disable copy.
+  YCbCrLockGuard(const YCbCrLockGuard&) = delete;
+  YCbCrLockGuard& operator=(const YCbCrLockGuard&) = delete;
+
+ private:
+  std::shared_ptr<AHardwareBuffer> mHwBuffer;
+  android_ycbcr mYCbCr = {};
+  status_t mLockStatus = DEAD_OBJECT;
+};
+
+// RAII utility class to safely lock AHardwareBuffer and obtain
+// AHardwareBuffer_Planes (suitable for interacting with RGBA / BLOB buffers).
+//
+// Access to the buffer is locked immediately after construction.
+class PlanesLockGuard {
+ public:
+  PlanesLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                  uint64_t usageFlags, sp<Fence> fence = nullptr);
+  PlanesLockGuard(PlanesLockGuard&& other) = default;
+  ~PlanesLockGuard();
+
+  // Returns OK if the buffer is successfully locked.
+  status_t getStatus() const;
+
+  // Dereferencing instance of this guard returns AHardwareBuffer_Planes
+  // structure describing the layout.
+  //
+  // Caller needs to check whether the buffer was successfully locked
+  // before dereferencing.
+  const AHardwareBuffer_Planes& operator*() const;
+
+  // Disable copy.
+  PlanesLockGuard(const PlanesLockGuard&) = delete;
+  PlanesLockGuard& operator=(const PlanesLockGuard&) = delete;
+
+ private:
+  std::shared_ptr<AHardwareBuffer> mHwBuffer;
+  AHardwareBuffer_Planes mPlanes;
+  status_t mLockStatus = DEAD_OBJECT;
+};
+
 // Converts camera AIDL status to ndk::ScopedAStatus
 inline ndk::ScopedAStatus cameraStatus(
     const ::aidl::android::hardware::camera::common::Status status) {
@@ -54,6 +117,7 @@
 
 // Representation of resolution / size.
 struct Resolution {
+  Resolution() = default;
   Resolution(const int w, const int h) : width(w), height(h) {
   }
 
@@ -69,10 +133,12 @@
     return width == other.width && height == other.height;
   }
 
-  const int width;
-  const int height;
+  int width = 0;
+  int height = 0;
 };
 
+std::ostream& operator<<(std::ostream& os, const Resolution& resolution);
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
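
Finally, a hedged usage sketch for YCbCrLockGuard, following the caller obligation described above to check the lock status before dereferencing (hwBuffer is assumed to be a valid shared AHardwareBuffer holding YUV data):

    YCbCrLockGuard yCbCrLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
    if (yCbCrLock.getStatus() != OK) {
      return cameraStatus(Status::INTERNAL_ERROR);
    }
    const android_ycbcr& ycbcr = *yCbCrLock;
    // ycbcr.y / ycbcr.cb / ycbcr.cr describe the locked planes until the guard
    // goes out of scope.
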