Merge "APM: apply volumes when abs volume stream changes" into main
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 48a7338..daa2656 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -102,6 +102,12 @@
return mMirrorMode;
}
+ if (mGbps.size() != mMirrorModeForProducers.size()) {
+ ALOGE("%s: mGbps size doesn't match mMirrorModeForProducers: %zu vs %zu",
+ __FUNCTION__, mGbps.size(), mMirrorModeForProducers.size());
+ return mMirrorMode;
+ }
+
// Use per-producer mirror mode if available.
for (size_t i = 0; i < mGbps.size(); i++) {
if (mGbps[i] == surface) {
@@ -350,6 +356,7 @@
mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
mTimestampBase = TIMESTAMP_BASE_DEFAULT;
mMirrorMode = MIRROR_MODE_AUTO;
+ mMirrorModeForProducers.push_back(mMirrorMode);
mUseReadoutTimestamp = false;
mFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
mDataspace = 0;
@@ -367,9 +374,9 @@
mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
- mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false),
- mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), mDataspace(0),
- mUsage(0) { }
+ mMirrorMode(MIRROR_MODE_AUTO), mMirrorModeForProducers(gbps.size(), mMirrorMode),
+ mUseReadoutTimestamp(false), mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
+ mDataspace(0), mUsage(0) { }
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
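Editor's note: the two hunks above add a per-producer mirror-mode list that must stay parallel to mGbps, which is why the new size-mismatch guard logs and falls back to mMirrorMode. Below is a minimal sketch of that guarded parallel-vector lookup; the template and all names are illustrative stand-ins, not the AOSP API.

```cpp
#include <cstddef>
#include <vector>

// Sketch of the guarded parallel-vector lookup used by getMirrorMode(surface):
// a per-producer value vector kept parallel to the producer list, falling back
// to the global default when the vectors drift out of sync or the producer is
// not found.
template <typename Producer, typename Mode>
Mode mirrorModeFor(const std::vector<Producer>& producers,
                   const std::vector<Mode>& modes,
                   const Producer& target, Mode defaultMode) {
    if (producers.size() != modes.size()) {
        // Parallel vectors out of sync; fail safe with the default, as the
        // new ALOGE branch above does.
        return defaultMode;
    }
    for (std::size_t i = 0; i < producers.size(); i++) {
        if (producers[i] == target) {
            return modes[i];
        }
    }
    return defaultMode;
}
```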
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 4353521..e2b28dc 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -335,7 +335,7 @@
void CCodecBufferChannel::setComponent(
const std::shared_ptr<Codec2Client::Component> &component) {
- mComponent = component;
+ std::atomic_store(&mComponent, component);
mComponentName = component->getName() + StringPrintf("#%d", int(uintptr_t(component.get()) % 997));
mName = mComponentName.c_str();
}
@@ -351,7 +351,7 @@
inputSurface->numProcessingBuffersBalance = 0;
inputSurface->surface = surface;
mHasInputSurface = true;
- return inputSurface->surface->connect(mComponent);
+ return inputSurface->surface->connect(std::atomic_load(&mComponent));
}
status_t CCodecBufferChannel::signalEndOfInputStream() {
@@ -547,7 +547,7 @@
now);
}
}
- err = mComponent->queue(&items);
+ err = std::atomic_load(&mComponent)->queue(&items);
}
if (err != C2_OK) {
Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
@@ -1457,7 +1457,7 @@
qbi.setSurfaceDamage(Region::INVALID_REGION); // we don't have dirty regions
qbi.getFrameTimestamps = true; // we need to know when a frame is rendered
IGraphicBufferProducer::QueueBufferOutput qbo;
- status_t result = mComponent->queueToOutputSurface(block, qbi, &qbo);
+ status_t result = std::atomic_load(&mComponent)->queueToOutputSurface(block, qbi, &qbo);
if (result != OK) {
ALOGI("[%s] queueBuffer failed: %d", mName, result);
if (result == NO_INIT) {
@@ -1596,7 +1596,7 @@
void CCodecBufferChannel::pollForRenderedBuffers() {
FrameEventHistoryDelta delta;
- mComponent->pollForRenderedFrames(&delta);
+ std::atomic_load(&mComponent)->pollForRenderedFrames(&delta);
processRenderedFrames(delta);
}
@@ -1605,7 +1605,7 @@
// knowing the internal state of CCodec/CCodecBufferChannel,
// prevent mComponent from being destroyed by holding the shared reference
// while this interface is being executed.
- std::shared_ptr<Codec2Client::Component> comp = mComponent;
+ std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
if (comp) {
SurfaceCallbackHandler::GetInstance().post(
SurfaceCallbackHandler::ON_BUFFER_RELEASED, comp, generation);
@@ -1617,7 +1617,7 @@
// knowing the internal state of CCodec/CCodecBufferChannel,
// prevent mComponent from being destroyed by holding the shared reference
// while this interface is being executed.
- std::shared_ptr<Codec2Client::Component> comp = mComponent;
+ std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
if (comp) {
SurfaceCallbackHandler::GetInstance().post(
SurfaceCallbackHandler::ON_BUFFER_ATTACHED, comp, generation);
@@ -1691,7 +1691,7 @@
C2ActualPipelineDelayTuning pipelineDelay(0);
C2SecureModeTuning secureMode(C2Config::SM_UNPROTECTED);
- c2_status_t err = mComponent->query(
+ c2_status_t err = std::atomic_load(&mComponent)->query(
{
&iStreamFormat,
&oStreamFormat,
@@ -1722,7 +1722,7 @@
size_t numOutputSlots = outputDelayValue + kSmoothnessFactor;
// TODO: get this from input format
- bool secure = mComponent->getName().find(".secure") != std::string::npos;
+ bool secure = std::atomic_load(&mComponent)->getName().find(".secure") != std::string::npos;
// secure mode is a static parameter (shall not change in the executing state)
mSendEncryptedInfoBuffer = secureMode.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED;
@@ -1768,7 +1768,7 @@
channelCount.invalidate();
pcmEncoding.invalidate();
}
- err = mComponent->query(stackParams,
+ err = std::atomic_load(&mComponent)->query(stackParams,
{ C2PortAllocatorsTuning::input::PARAM_TYPE },
C2_DONT_BLOCK,
&params);
@@ -1929,7 +1929,7 @@
// query C2PortAllocatorsTuning::output from component, or use default allocator if
// unsuccessful.
std::vector<std::unique_ptr<C2Param>> params;
- err = mComponent->query({ },
+ err = std::atomic_load(&mComponent)->query({ },
{ C2PortAllocatorsTuning::output::PARAM_TYPE },
C2_DONT_BLOCK,
&params);
@@ -1957,7 +1957,7 @@
// if unsuccessful.
if (outputSurface) {
params.clear();
- err = mComponent->query({ },
+ err = std::atomic_load(&mComponent)->query({ },
{ C2PortSurfaceAllocatorTuning::output::PARAM_TYPE },
C2_DONT_BLOCK,
&params);
@@ -1988,7 +1988,7 @@
}
if ((poolMask >> pools->outputAllocatorId) & 1) {
- err = mComponent->createBlockPool(
+ err = std::atomic_load(&mComponent)->createBlockPool(
pools->outputAllocatorId, &pools->outputPoolId, &pools->outputPoolIntf);
ALOGI("[%s] Created output block pool with allocatorID %u => poolID %llu - %s",
mName, pools->outputAllocatorId,
@@ -2009,7 +2009,8 @@
C2PortBlockPoolsTuning::output::AllocUnique({ pools->outputPoolId });
std::vector<std::unique_ptr<C2SettingResult>> failures;
- err = mComponent->config({ poolIdsTuning.get() }, C2_MAY_BLOCK, &failures);
+ err = std::atomic_load(&mComponent)->config(
+ { poolIdsTuning.get() }, C2_MAY_BLOCK, &failures);
ALOGD("[%s] Configured output block pool ids %llu => %s",
mName, (unsigned long long)poolIdsTuning->m.values[0], asString(err));
outputPoolId_ = pools->outputPoolId;
@@ -2017,7 +2018,7 @@
if (prevOutputPoolId != C2BlockPool::BASIC_LINEAR
&& prevOutputPoolId != C2BlockPool::BASIC_GRAPHIC) {
- c2_status_t err = mComponent->destroyBlockPool(prevOutputPoolId);
+ c2_status_t err = std::atomic_load(&mComponent)->destroyBlockPool(prevOutputPoolId);
if (err != C2_OK) {
ALOGW("Failed to clean up previous block pool %llu - %s (%d)\n",
(unsigned long long) prevOutputPoolId, asString(err), err);
@@ -2049,7 +2050,7 @@
// Try to set output surface to created block pool if given.
if (outputSurface) {
- mComponent->setOutputSurface(
+ std::atomic_load(&mComponent)->setOutputSurface(
outputPoolId_,
outputSurface,
outputGeneration,
@@ -2058,7 +2059,7 @@
// configure CPU read consumer usage
C2StreamUsageTuning::output outputUsage{0u, C2MemoryUsage::CPU_READ};
std::vector<std::unique_ptr<C2SettingResult>> failures;
- err = mComponent->config({ &outputUsage }, C2_MAY_BLOCK, &failures);
+ err = std::atomic_load(&mComponent)->config({ &outputUsage }, C2_MAY_BLOCK, &failures);
// do not print error message for now as most components may not yet
// support this setting
ALOGD_IF(err != C2_BAD_INDEX, "[%s] Configured output usage [%#llx]",
@@ -2180,7 +2181,8 @@
}
C2StreamBufferTypeSetting::output oStreamFormat(0u);
C2PrependHeaderModeSetting prepend(PREPEND_HEADER_TO_NONE);
- c2_status_t err = mComponent->query({ &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
+ c2_status_t err = std::atomic_load(&mComponent)->query(
+ { &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
if (err != C2_OK && err != C2_BAD_INDEX) {
return UNKNOWN_ERROR;
}
@@ -2198,7 +2200,7 @@
now);
}
}
- err = mComponent->queue(&flushedConfigs);
+ err = std::atomic_load(&mComponent)->queue(&flushedConfigs);
if (err != C2_OK) {
ALOGW("[%s] Error while queueing a flushed config", mName);
return UNKNOWN_ERROR;
@@ -2249,7 +2251,8 @@
Mutexed<BlockPools>::Locked pools(mBlockPools);
outputPoolId = pools->outputPoolId;
}
- if (mComponent) mComponent->stopUsingOutputSurface(outputPoolId);
+ std::shared_ptr<Codec2Client::Component> comp = std::atomic_load(&mComponent);
+ if (comp) comp->stopUsingOutputSurface(outputPoolId);
if (pushBlankBuffer) {
sp<ANativeWindow> anw = static_cast<ANativeWindow *>(surface.get());
@@ -2283,7 +2286,8 @@
void CCodecBufferChannel::release() {
mInfoBuffers.clear();
- mComponent.reset();
+ std::shared_ptr<Codec2Client::Component> nullComp;
+ std::atomic_store(&mComponent, nullComp);
mInputAllocator.reset();
mOutputSurface.lock()->surface.clear();
{
@@ -2605,7 +2609,7 @@
}
}
if (maxDequeueCount > 0) {
- mComponent->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
+ std::atomic_load(&mComponent)->setOutputSurfaceMaxDequeueCount(maxDequeueCount);
}
}
@@ -2853,7 +2857,7 @@
}
if (outputPoolIntf) {
- if (mComponent->setOutputSurface(
+ if (std::atomic_load(&mComponent)->setOutputSurface(
outputPoolId,
producer,
generation,
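Editor's note: every access to mComponent in this file is converted from a plain shared_ptr read to std::atomic_load, pairing with the std::atomic_store in setComponent() and release(). Plain shared_ptr assignment is not thread-safe against concurrent readers; the free-function atomic overloads are. A self-contained sketch of the idiom, using only the standard library (Component and the thread bodies are invented for illustration):

```cpp
#include <atomic>
#include <cstdio>
#include <memory>
#include <thread>

struct Component {
    int id;
};

// Shared pointer replaced by one thread while another reads it. Plain
// `ptr = other` is not thread-safe for the shared_ptr object itself; the
// std::atomic_load/std::atomic_store free functions are.
std::shared_ptr<Component> gComponent;

void writer() {
    for (int i = 0; i < 1000; ++i) {
        std::atomic_store(&gComponent, std::make_shared<Component>(Component{i}));
    }
    // Clearing the pointer, as CCodecBufferChannel::release() now does.
    std::atomic_store(&gComponent, std::shared_ptr<Component>());
}

void reader() {
    for (int i = 0; i < 1000; ++i) {
        // Take a strong reference first, then use it; the component cannot be
        // destroyed while `comp` holds it, mirroring the CCodec comments above.
        std::shared_ptr<Component> comp = std::atomic_load(&gComponent);
        if (comp) {
            (void)comp->id;
        }
    }
}

int main() {
    std::thread t1(writer), t2(reader);
    t1.join();
    t2.join();
    std::printf("done\n");
    return 0;
}
```

Since C++20 the same idea is normally spelled std::atomic<std::shared_ptr<T>>, but the free functions shown here are the ones the change itself uses.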
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index 1b10f81..d27d52a 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -299,7 +299,7 @@
return "Type Id Active Client(pid/uid) Session Port Id S Flags "
" Format Chn mask SRate "
"ST Usg CT "
- " G db L dB R dB VS dB PortVol dB PortMuted"
+ " G db L dB R dB VS dB PortVol dB PortMuted "
" Server FrmCnt FrmRdy F Underruns Flushed BitPerfect InternalMute"
" Latency\n"sv;
}
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index ee90796..867561a 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1090,7 +1090,7 @@
result.appendFormat("%7s %7u/%7u %7u %7u %2s 0x%03X "
"%08X %08X %6u "
"%2u %3x %2x "
- "%5.2g %5.2g %5.2g %5.2g%c %11.2g %12s"
+ "%5.2g %5.2g %5.2g %5.2g%c %11.2g %10s "
"%08X %6zu%c %6zu %c %9u%c %7u %10s %12s",
active ? "yes" : "no",
mClient ? mClient->pid() : getpid() ,
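Editor's note: the IAfTrack.h and Tracks.cpp hunks appear to be one fix: the row format's %12s field did not line up with the PortMuted column in the dump header, so the trailing columns drifted. A toy illustration of keeping a dump header and its row format in step (column names invented):

```cpp
#include <cstdio>

int main() {
    // Header and row format must agree on field widths, including trailing
    // spaces, or every column to the right drifts out of alignment.
    std::printf("%-8s %10s %6s\n", "Name", "PortMuted", "Frames");
    std::printf("%-8s %10s %6d\n", "track0", "yes", 4096);
    std::printf("%-8s %10s %6d\n", "track1", "no", 512);
    return 0;
}
```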
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 403d1ba..1082d31 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -156,40 +156,21 @@
return EngineBase::setForceUse(usage, config);
}
-bool Engine::isBtScoActive(DeviceVector& availableOutputDevices,
- const SwAudioOutputCollection &outputs) const {
+bool Engine::isBtScoActive(DeviceVector& availableOutputDevices) const {
+ // SCO is considered active if:
+ // 1) a SCO device is connected
+ // 2) the preferred device for PHONE strategy is BT SCO: this is controlled only by Java
+ // AudioService and is only true if the SCO audio link has been confirmed active by BT.
if (availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
return false;
}
- // SCO is active if:
- // 1) we are in a call and SCO is the preferred device for PHONE strategy
- if (isInCall() && audio_is_bluetooth_out_sco_device(
+
+ if (!audio_is_bluetooth_out_sco_device(
getPreferredDeviceTypeForLegacyStrategy(availableOutputDevices, STRATEGY_PHONE))) {
- return true;
+ return false;
}
- // 2) A strategy for which the preferred device is SCO is active
- for (const auto &ps : getOrderedProductStrategies()) {
- if (outputs.isStrategyActive(ps) &&
- !getPreferredAvailableDevicesForProductStrategy(availableOutputDevices, ps)
- .getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
- return true;
- }
- }
- // 3) a ringtone is active and SCO is used for ringing
- if (outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_RING))
- && (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
- == AUDIO_POLICY_FORCE_BT_SCO)) {
- return true;
- }
- // 4) an active input is routed from SCO
- DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
- const auto &inputs = getApmObserver()->getInputs();
- if (inputs.activeInputsCountOnDevices(availableInputDevices.getDevicesFromType(
- AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)) > 0) {
- return true;
- }
- return false;
+ return true;
}
void Engine::filterOutputDevicesForStrategy(legacy_strategy strategy,
@@ -200,7 +181,7 @@
if (com::android::media::audioserver::use_bt_sco_for_media()) {
// remove A2DP and LE Audio devices whenever BT SCO is in use
- if (isBtScoActive(availableOutputDevices, outputs)) {
+ if (isBtScoActive(availableOutputDevices)) {
availableOutputDevices.remove(
availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllA2dpSet()));
availableOutputDevices.remove(
@@ -486,6 +467,18 @@
// Get the last connected device of wired and bluetooth a2dp
devices2 = availableOutputDevices.getFirstDevicesFromTypes(
getLastRemovableMediaDevices(GROUP_NONE, excludedDevices));
+ if (com::android::media::audioserver::use_bt_sco_for_media()) {
+ if (isBtScoActive(availableOutputDevices)
+ && !(devices2.getDevicesFromTypes(
+ getAudioDeviceOutAllA2dpSet()).isEmpty()
+ && devices2.getDevicesFromTypes(
+ getAudioDeviceOutAllBleSet()).isEmpty())) {
+ devices2 = availableOutputDevices.getFirstDevicesFromTypes(
+ { AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
+ AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
+ AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
+ }
+ }
} else {
// Get the last connected device of wired except bluetooth a2dp
devices2 = availableOutputDevices.getFirstDevicesFromTypes(
@@ -493,15 +486,6 @@
}
}
- if (com::android::media::audioserver::use_bt_sco_for_media()) {
- if (devices2.isEmpty() && isBtScoActive(availableOutputDevices, outputs)) {
- devices2 = availableOutputDevices.getFirstDevicesFromTypes(
- { AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
- AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
- AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
- }
- }
-
if ((devices2.isEmpty()) &&
(getForceUse(AUDIO_POLICY_FORCE_FOR_DOCK) == AUDIO_POLICY_FORCE_ANALOG_DOCK)) {
devices2 = availableOutputDevices.getDevicesFromType(
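Editor's note: besides reducing isBtScoActive() to the two documented conditions, moving the use_bt_sco_for_media() block changes the fallback order: previously SCO was chosen for media only when no removable device was picked at all; now an active SCO link overrides an A2DP or LE pick. A hedged sketch of the new ordering, with invented enum and helper names standing in for the DeviceVector API:

```cpp
#include <optional>
#include <vector>

enum class Dev { A2DP, BLE, SCO, WIRED };

// Illustrative only: models the reordered fallback in the media strategy.
// Old behavior: SCO was tried only if no removable device was found at all.
// New behavior: if SCO is active, it replaces an A2DP/BLE pick outright.
std::optional<Dev> pickMediaDevice(const std::vector<Dev>& lastRemovable,
                                   bool scoActive) {
    std::optional<Dev> pick;
    if (!lastRemovable.empty()) {
        pick = lastRemovable.front();  // last connected removable device
    }
    const bool pickedBt = pick && (*pick == Dev::A2DP || *pick == Dev::BLE);
    if (scoActive && pickedBt) {
        pick = Dev::SCO;  // active SCO link wins over A2DP/LE for media
    }
    return pick;
}
```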
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 862b5fd..e9c71dd 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -95,8 +95,7 @@
DeviceVector getDisabledDevicesForInputSource(
const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
- bool isBtScoActive(DeviceVector& availableOutputDevices,
- const SwAudioOutputCollection &outputs) const;
+ bool isBtScoActive(DeviceVector& availableOutputDevices) const;
std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
};
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.cc b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
index a61f553..da1c208 100644
--- a/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
@@ -16,6 +16,7 @@
#include "VirtualCameraCaptureResult.h"
#include <cstdint>
+#include <memory>
#include "VirtualCameraCaptureRequest.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
@@ -34,7 +35,7 @@
} // namespace
-CameraMetadata createCaptureResultMetadata(
+std::unique_ptr<CameraMetadata> createCaptureResultMetadata(
const std::chrono::nanoseconds timestamp,
const RequestSettings& requestSettings,
const Resolution reportedSensorSize) {
@@ -109,9 +110,9 @@
if (metadata == nullptr) {
ALOGE("%s: Failed to build capture result metadata", __func__);
- return CameraMetadata();
+ return std::make_unique<CameraMetadata>();
}
- return std::move(*metadata);
+ return metadata;
}
} // namespace virtualcamera
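Editor's note: returning std::unique_ptr<CameraMetadata> rather than a by-value CameraMetadata removes the std::move(*metadata) on the success path and lets callers distinguish a missing result from an empty one. A minimal sketch of the consuming side, mirroring the null-safe copy that createCaptureResult() performs below (Metadata and Result are stand-in types):

```cpp
#include <memory>

struct Metadata {
    // stand-in for ::aidl::android::hardware::camera::device::CameraMetadata
};

struct Result {
    Metadata result;
};

// The caller copies the metadata when present and falls back to a
// default-constructed object otherwise, as the new createCaptureResult does.
Result makeResult(std::unique_ptr<Metadata> metadata) {
    Result r;
    r.result = metadata != nullptr ? *metadata : Metadata();
    return r;
}
```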
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.h b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
index 9e5b4d7..c3978f7 100644
--- a/services/camera/virtualcamera/VirtualCameraCaptureResult.h
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
@@ -18,21 +18,10 @@
#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
#include <chrono>
-#include <cstdint>
#include <cstring>
-#include <future>
#include <memory>
-#include <mutex>
-#include <thread>
-#include <utility>
-#include <vector>
-#include "Exif.h"
-#include "GLES/gl.h"
#include "VirtualCameraCaptureRequest.h"
-#include "VirtualCameraDevice.h"
-#include "VirtualCameraRenderThread.h"
-#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
namespace android {
@@ -41,7 +30,7 @@
// Construct the Metadata for the Capture result based on the request
// settings, timestamp and reported sensor size
-::aidl::android::hardware::camera::device::CameraMetadata
+std::unique_ptr<::aidl::android::hardware::camera::device::CameraMetadata>
createCaptureResultMetadata(std::chrono::nanoseconds timestamp,
const RequestSettings& requestSettings,
Resolution reportedSensorSize);
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index becba90..836d4e0 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -100,6 +100,9 @@
static constexpr UpdateTextureTask kUpdateTextureTask;
+// The maximum number of nanoseconds to wait for the first frame to be drawn on the input surface.
+static constexpr std::chrono::nanoseconds kMaxWaitFirstFrame = 3s;
+
NotifyMsg createShutterNotifyMsg(int frameNumber,
std::chrono::nanoseconds timestamp) {
NotifyMsg msg;
@@ -110,11 +113,13 @@
return msg;
}
-NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {
+// Create a NotifyMsg for an error case. The default error is ERROR_BUFFER.
+NotifyMsg createErrorNotifyMsg(int frameNumber, int streamId,
+ ErrorCode errorCode = ErrorCode::ERROR_BUFFER) {
NotifyMsg msg;
msg.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = frameNumber,
.errorStreamId = streamId,
- .errorCode = ErrorCode::ERROR_BUFFER});
+ .errorCode = errorCode});
return msg;
}
@@ -421,10 +426,15 @@
}
// Calculate the maximal amount of time we can afford to wait for next frame.
+ const bool isFirstFrameDrawn = mEglSurfaceTexture->isFirstFrameDrawn();
+ ALOGV("First Frame Drawn: %s", isFirstFrameDrawn ? "Yes" : "No");
+
const std::chrono::nanoseconds maxFrameDuration =
- getMaxFrameDuration(request.getRequestSettings());
+ isFirstFrameDrawn ? getMaxFrameDuration(request.getRequestSettings())
+ : kMaxWaitFirstFrame;
const std::chrono::nanoseconds elapsedDuration =
- timestamp - lastAcquisitionTimestamp;
+ isFirstFrameDrawn ? timestamp - lastAcquisitionTimestamp : 0ns;
+
if (elapsedDuration < maxFrameDuration) {
// We can afford to wait for next frame.
// Note that if there's already new frame in the input Surface, the call
@@ -434,6 +444,17 @@
timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
std::chrono::steady_clock::now().time_since_epoch());
if (!gotNewFrame) {
+ if (!mEglSurfaceTexture->isFirstFrameDrawn()) {
+ // No input frame has ever been drawn. This is considered an error
+ // case. Notify the framework of the failure and return early.
+ ALOGW("Timed out waiting for first frame to be drawn.");
+ std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
+ request.getFrameNumber(), /* metadata = */ nullptr);
+ notifyTimeout(request, *captureResult);
+ submitCaptureResult(std::move(captureResult));
+ return;
+ }
+
ALOGV(
"%s: No new frame received on input surface after waiting for "
"%" PRIu64 "ns, repeating last frame.",
@@ -457,75 +478,20 @@
captureTimestamp.count(), timestamp.count());
}
- CaptureResult captureResult;
- captureResult.fmqResultSize = 0;
- captureResult.frameNumber = request.getFrameNumber();
- // Partial result needs to be set to 1 when metadata are present.
- captureResult.partialResult = 1;
- captureResult.inputBuffer.streamId = -1;
- captureResult.physicalCameraMetadata.resize(0);
- captureResult.result = createCaptureResultMetadata(
- captureTimestamp, request.getRequestSettings(), mReportedSensorSize);
+ std::unique_ptr<CaptureResult> captureResult = createCaptureResult(
+ request.getFrameNumber(),
+ createCaptureResultMetadata(
+ captureTimestamp, request.getRequestSettings(), mReportedSensorSize));
+ renderOutputBuffers(request, *captureResult);
- const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
- captureResult.outputBuffers.resize(buffers.size());
-
- for (int i = 0; i < buffers.size(); ++i) {
- const CaptureRequestBuffer& reqBuffer = buffers[i];
- StreamBuffer& resBuffer = captureResult.outputBuffers[i];
- resBuffer.streamId = reqBuffer.getStreamId();
- resBuffer.bufferId = reqBuffer.getBufferId();
- resBuffer.status = BufferStatus::OK;
-
- const std::optional<Stream> streamConfig =
- mSessionContext.getStreamConfig(reqBuffer.getStreamId());
-
- if (!streamConfig.has_value()) {
- resBuffer.status = BufferStatus::ERROR;
- continue;
- }
-
- auto status = streamConfig->format == PixelFormat::BLOB
- ? renderIntoBlobStreamBuffer(
- reqBuffer.getStreamId(), reqBuffer.getBufferId(),
- captureResult.result, request.getRequestSettings(),
- reqBuffer.getFence())
- : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
- reqBuffer.getBufferId(),
- reqBuffer.getFence());
- if (!status.isOk()) {
- resBuffer.status = BufferStatus::ERROR;
- }
- }
-
- std::vector<NotifyMsg> notifyMsg{
- createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
- for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
- if (resBuffer.status != BufferStatus::OK) {
- notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
- resBuffer.streamId));
- }
- }
-
- auto status = mCameraDeviceCallback->notify(notifyMsg);
+ auto status = notifyShutter(request, *captureResult, captureTimestamp);
if (!status.isOk()) {
ALOGE("%s: notify call failed: %s", __func__,
status.getDescription().c_str());
return;
}
- std::vector<::aidl::android::hardware::camera::device::CaptureResult>
- captureResults(1);
- captureResults[0] = std::move(captureResult);
-
- status = mCameraDeviceCallback->processCaptureResult(captureResults);
- if (!status.isOk()) {
- ALOGE("%s: processCaptureResult call failed: %s", __func__,
- status.getDescription().c_str());
- return;
- }
-
- ALOGV("%s: Successfully called processCaptureResult", __func__);
+ submitCaptureResult(std::move(captureResult));
}
std::chrono::nanoseconds VirtualCameraRenderThread::throttleRendering(
@@ -574,6 +540,106 @@
return surfaceTimestamp;
}
+std::unique_ptr<CaptureResult> VirtualCameraRenderThread::createCaptureResult(
+ int frameNumber, std::unique_ptr<CameraMetadata> metadata) {
+ std::unique_ptr<CaptureResult> captureResult =
+ std::make_unique<CaptureResult>();
+ captureResult->fmqResultSize = 0;
+ captureResult->frameNumber = frameNumber;
+ // Partial result needs to be set to 1 when metadata is present.
+ captureResult->partialResult = 1;
+ captureResult->inputBuffer.streamId = -1;
+ captureResult->physicalCameraMetadata.resize(0);
+ captureResult->result = metadata != nullptr ? *metadata : CameraMetadata();
+ return captureResult;
+}
+
+void VirtualCameraRenderThread::renderOutputBuffers(
+ const ProcessCaptureRequestTask& request, CaptureResult& captureResult) {
+ const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
+ captureResult.outputBuffers.resize(buffers.size());
+
+ for (int i = 0; i < buffers.size(); ++i) {
+ const CaptureRequestBuffer& reqBuffer = buffers[i];
+ StreamBuffer& resBuffer = captureResult.outputBuffers[i];
+ resBuffer.streamId = reqBuffer.getStreamId();
+ resBuffer.bufferId = reqBuffer.getBufferId();
+ resBuffer.status = BufferStatus::OK;
+
+ const std::optional<Stream> streamConfig =
+ mSessionContext.getStreamConfig(reqBuffer.getStreamId());
+
+ if (!streamConfig.has_value()) {
+ resBuffer.status = BufferStatus::ERROR;
+ continue;
+ }
+
+ auto status = streamConfig->format == PixelFormat::BLOB
+ ? renderIntoBlobStreamBuffer(
+ reqBuffer.getStreamId(), reqBuffer.getBufferId(),
+ captureResult.result, request.getRequestSettings(),
+ reqBuffer.getFence())
+ : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
+ reqBuffer.getBufferId(),
+ reqBuffer.getFence());
+ if (!status.isOk()) {
+ resBuffer.status = BufferStatus::ERROR;
+ }
+ }
+}
+
+::ndk::ScopedAStatus VirtualCameraRenderThread::notifyTimeout(
+ const ProcessCaptureRequestTask& request, CaptureResult& captureResult) {
+ const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
+ captureResult.outputBuffers.resize(buffers.size());
+
+ std::vector<NotifyMsg> notifyMsgs;
+
+ for (int i = 0; i < buffers.size(); ++i) {
+ const CaptureRequestBuffer& reqBuffer = buffers[i];
+ StreamBuffer& resBuffer = captureResult.outputBuffers[i];
+ resBuffer.streamId = reqBuffer.getStreamId();
+ resBuffer.bufferId = reqBuffer.getBufferId();
+ resBuffer.status = BufferStatus::ERROR;
+ notifyMsgs.push_back(createErrorNotifyMsg(
+ request.getFrameNumber(), resBuffer.streamId, ErrorCode::ERROR_REQUEST));
+ }
+ return mCameraDeviceCallback->notify(notifyMsgs);
+}
+
+::ndk::ScopedAStatus VirtualCameraRenderThread::notifyShutter(
+ const ProcessCaptureRequestTask& request, const CaptureResult& captureResult,
+ std::chrono::nanoseconds captureTimestamp) {
+ std::vector<NotifyMsg> notifyMsgs{
+ createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
+ for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
+ if (resBuffer.status != BufferStatus::OK) {
+ notifyMsgs.push_back(
+ createErrorNotifyMsg(request.getFrameNumber(), resBuffer.streamId));
+ }
+ }
+
+ return mCameraDeviceCallback->notify(notifyMsgs);
+}
+
+::ndk::ScopedAStatus VirtualCameraRenderThread::submitCaptureResult(
+ std::unique_ptr<CaptureResult> captureResult) {
+ std::vector<::aidl::android::hardware::camera::device::CaptureResult>
+ captureResults;
+ captureResults.push_back(std::move(*captureResult));
+
+ ::ndk::ScopedAStatus status =
+ mCameraDeviceCallback->processCaptureResult(captureResults);
+ if (!status.isOk()) {
+ ALOGE("%s: processCaptureResult call failed: %s", __func__,
+ status.getDescription().c_str());
+ return status;
+ }
+
+ ALOGV("%s: Successfully called processCaptureResult", __func__);
+ return status;
+}
+
void VirtualCameraRenderThread::flushCaptureRequest(
const ProcessCaptureRequestTask& request) {
CaptureResult captureResult;
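Editor's note: the new first-frame handling effectively picks a wait budget and a timeout outcome based on whether any frame was ever drawn. A compact sketch of that policy under stated assumptions (acquireFrame() and its parameters are invented; only kMaxWaitFirstFrame mirrors the constant above):

```cpp
#include <chrono>

using namespace std::chrono_literals;

// Before any frame has ever been drawn, wait up to kMaxWaitFirstFrame and
// treat a timeout as a request error; afterwards, wait at most one frame
// duration and simply repeat the last frame on timeout.
constexpr std::chrono::nanoseconds kMaxWaitFirstFrame = 3s;

enum class FrameOutcome { kNewFrame, kRepeatLastFrame, kRequestError };

FrameOutcome acquireFrame(bool firstFrameDrawn,
                          std::chrono::nanoseconds elapsedSinceLastFrame,
                          std::chrono::nanoseconds maxFrameDuration,
                          bool gotNewFrame) {
    const std::chrono::nanoseconds budget =
        firstFrameDrawn ? maxFrameDuration : kMaxWaitFirstFrame;
    const std::chrono::nanoseconds elapsed =
        firstFrameDrawn ? elapsedSinceLastFrame : 0ns;
    if (elapsed < budget && gotNewFrame) {
        return FrameOutcome::kNewFrame;
    }
    // Out of budget without a new frame: repeating stale content is only
    // acceptable once at least one real frame exists.
    return firstFrameDrawn ? FrameOutcome::kRepeatLastFrame
                           : FrameOutcome::kRequestError;
}
```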
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index 1fb4e84..4cad39e 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -19,6 +19,7 @@
#include <atomic>
#include <chrono>
+#include <cstddef>
#include <cstdint>
#include <deque>
#include <future>
@@ -205,6 +206,35 @@
std::chrono::nanoseconds getSurfaceTimestamp(
std::chrono::nanoseconds timeSinceLastFrame);
+ // Build a default capture result object populating the metadata from the request.
+ std::unique_ptr<::aidl::android::hardware::camera::device::CaptureResult>
+ createCaptureResult(
+ int frameNumber,
+ std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
+ metadata);
+
+ // Renders the images from the input surface into the request's buffers.
+ void renderOutputBuffers(
+ const ProcessCaptureRequestTask& request,
+ ::aidl::android::hardware::camera::device::CaptureResult& captureResult);
+
+ // Notify a shutter event for all the buffers in this request.
+ ::ndk::ScopedAStatus notifyShutter(
+ const ProcessCaptureRequestTask& request,
+ const ::aidl::android::hardware::camera::device::CaptureResult& captureResult,
+ std::chrono::nanoseconds captureTimestamp);
+
+ // Notify a timeout error for this request. The capture result still needs to
+ // be submitted after this call.
+ ::ndk::ScopedAStatus notifyTimeout(
+ const ProcessCaptureRequestTask& request,
+ ::aidl::android::hardware::camera::device::CaptureResult& captureResult);
+
+ // Submit the capture result to the camera callback.
+ ::ndk::ScopedAStatus submitCaptureResult(
+ std::unique_ptr<::aidl::android::hardware::camera::device::CaptureResult>
+ captureResult);
+
// Camera callback
const std::shared_ptr<
::aidl::android::hardware::camera::device::ICameraDeviceCallback>
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index be36ec4..fc469a0 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -105,6 +105,10 @@
return std::chrono::nanoseconds(mGlConsumer->getTimestamp());
}
+bool EglSurfaceTexture::isFirstFrameDrawn() {
+ return mGlConsumer->getFrameNumber() > 0;
+}
+
GLuint EglSurfaceTexture::updateTexture() {
int previousFrameId;
int framesAdvance = 0;
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index c1f1169..9f75315 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -86,6 +86,9 @@
// set by the most recent call to updateTexture.
std::chrono::nanoseconds getTimestamp();
+ // Returns true if a frame has ever been drawn on this surface.
+ bool isFirstFrameDrawn();
+
private:
#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
sp<IGraphicBufferProducer> mBufferProducer;