Merge "CCodec/Codec2Client: signal surface consumer usage to component" into sc-dev
diff --git a/drm/drmserver/drmserver.rc b/drm/drmserver/drmserver.rc
index eb176c1..de46fb9 100644
--- a/drm/drmserver/drmserver.rc
+++ b/drm/drmserver/drmserver.rc
@@ -1,12 +1,5 @@
service drm /system/bin/drmserver
- disabled
class main
user drm
group drm system inet drmrpc readproc
writepid /dev/cpuset/foreground/tasks
-
-on property:drm.service.enabled=true
- start drm
-
-on property:drm.service.enabled=1
- start drm
diff --git a/drm/libdrmframework/DrmManagerClientImpl.cpp b/drm/libdrmframework/DrmManagerClientImpl.cpp
index 38591bc..b0a441b 100644
--- a/drm/libdrmframework/DrmManagerClientImpl.cpp
+++ b/drm/libdrmframework/DrmManagerClientImpl.cpp
@@ -52,22 +52,25 @@
const sp<IDrmManagerService>& DrmManagerClientImpl::getDrmManagerService() {
Mutex::Autolock lock(sMutex);
if (NULL == sDrmManagerService.get()) {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("drm.service.enabled", value, NULL) == 0) {
+ // Drm is undefined for this device
+ return sDrmManagerService;
+ }
+
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder;
- for(int i = 0; i < 10; i++) {
+ do {
binder = sm->getService(String16("drm.drmManager"));
if (binder != 0) {
break;
}
- ALOGW("DrmManagerService not published, waiting... %d", i);
+ ALOGW("DrmManagerService not published, waiting...");
struct timespec reqt;
reqt.tv_sec = 0;
reqt.tv_nsec = 500000000; //0.5 sec
nanosleep(&reqt, NULL);
- }
- if (binder == NULL) {
- return sDrmManagerService;
- }
+ } while (true);
if (NULL == sDeathNotifier.get()) {
sDeathNotifier = new DeathNotifier();
}
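
Note: the rc change above removes the lazy-start property triggers and leaves drmserver in class main, so the client can now block until drm.drmManager is registered instead of giving up after ten attempts. A minimal sketch of that lookup pattern, assuming the cutils property API and binder checkService(); the standalone helper itself is illustrative and not part of this change:

    #include <binder/IServiceManager.h>
    #include <cutils/properties.h>
    #include <unistd.h>
    #include <utils/String16.h>

    // Illustrative helper mirroring DrmManagerClientImpl::getDrmManagerService().
    static android::sp<android::IBinder> waitForDrmManager() {
        char value[PROPERTY_VALUE_MAX];
        if (property_get("drm.service.enabled", value, NULL) == 0) {
            return nullptr;  // DRM is undefined for this device
        }
        android::sp<android::IServiceManager> sm = android::defaultServiceManager();
        android::sp<android::IBinder> binder;
        // drmserver starts with class main, so keep polling until it registers.
        while ((binder = sm->checkService(android::String16("drm.drmManager"))) == nullptr) {
            usleep(500000);  // 0.5 sec, same cadence as the loop above
        }
        return binder;
    }
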
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 0c4608a..83a4a37 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -2531,12 +2531,14 @@
ALOGW("%s input %d client %d already stopped", __FUNCTION__, input, client->portId());
return INVALID_OPERATION;
}
-
+ auto old_source = inputDesc->source();
inputDesc->setClientActive(client, false);
inputDesc->stop();
if (inputDesc->isActive()) {
- setInputDevice(input, getNewInputDevice(inputDesc), false /* force */);
+ auto current_source = inputDesc->source();
+ setInputDevice(input, getNewInputDevice(inputDesc),
+ old_source != current_source /* force */);
} else {
sp<AudioPolicyMix> policyMix = inputDesc->mPolicyMix.promote();
// if input maps to a dynamic policy with an activity listener, notify of state change
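
Note: the hunk above re-routes the input after a client stops and forces the update only when the remaining clients changed the effective source; the force flag matters because setInputDevice() can otherwise skip the reconfiguration when the selected device is unchanged. A trivial restatement of that condition (the helper is illustrative, not part of AudioPolicyManager):

    #include <system/audio.h>

    // Illustrative: force HAL input reconfiguration when removing the stopped
    // client changed the effective source (e.g. VOICE_RECOGNITION -> MIC),
    // even if getNewInputDevice() picks the same device as before.
    static bool shouldForceInputRouting(audio_source_t oldSource, audio_source_t newSource) {
        return oldSource != newSource;
    }
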
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 19b54e0..a66a592 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -42,17 +42,29 @@
mDepthBufferAcquired(false),
mBlobBufferAcquired(false),
mProducerListener(new ProducerListener()),
- mMaxJpegSize(-1),
+ mMaxJpegBufferSize(-1),
+ mUHRMaxJpegBufferSize(-1),
mIsLogicalCamera(false) {
if (device != nullptr) {
CameraMetadata staticInfo = device->info();
auto entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE);
if (entry.count > 0) {
- mMaxJpegSize = entry.data.i32[0];
+ mMaxJpegBufferSize = entry.data.i32[0];
} else {
ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
}
+ mUHRMaxJpegSize =
+ SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
+ /*ultraHighResolution*/true);
+ mDefaultMaxJpegSize =
+ SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
+ /*ultraHighResolution*/false);
+
+ mUHRMaxJpegBufferSize =
+ SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
+ mMaxJpegBufferSize);
+
entry = staticInfo.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
if (entry.count == 5) {
mIntrinsicCalibration.reserve(5);
@@ -243,13 +255,22 @@
jpegSize = inputFrame.jpegBuffer.width;
}
- size_t maxDepthJpegSize;
- if (mMaxJpegSize > 0) {
- maxDepthJpegSize = mMaxJpegSize;
+ size_t maxDepthJpegBufferSize = 0;
+ if (mMaxJpegBufferSize > 0) {
+ // If this is an ultra high resolution sensor and the input frame is
+ // larger than the default-mode max jpeg size, use the UHR buffer size.
+ if (mUHRMaxJpegSize.width != 0 &&
+ inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
+ mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
+ maxDepthJpegBufferSize = mUHRMaxJpegBufferSize;
+ } else {
+ maxDepthJpegBufferSize = mMaxJpegBufferSize;
+ }
} else {
- maxDepthJpegSize = std::max<size_t> (jpegSize,
+ maxDepthJpegBufferSize = std::max<size_t> (jpegSize,
inputFrame.depthBuffer.width * inputFrame.depthBuffer.height * 3 / 2);
}
+
uint8_t jpegQuality = 100;
auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
if (entry.count > 0) {
@@ -259,7 +280,7 @@
// The final depth photo will consist of the main jpeg buffer, the depth map buffer (also in
// jpeg format) and confidence map (jpeg as well). Assume worst case that all 3 jpeg need
// max jpeg size.
- size_t finalJpegBufferSize = maxDepthJpegSize * 3;
+ size_t finalJpegBufferSize = maxDepthJpegBufferSize * 3;
if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), finalJpegBufferSize, 1))
!= OK) {
@@ -302,7 +323,7 @@
depthPhoto.mDepthMapStride = inputFrame.depthBuffer.stride;
depthPhoto.mJpegQuality = jpegQuality;
depthPhoto.mIsLogical = mIsLogicalCamera;
- depthPhoto.mMaxJpegSize = maxDepthJpegSize;
+ depthPhoto.mMaxJpegSize = maxDepthJpegBufferSize;
// The camera intrinsic calibration layout is as follows:
// [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
if (mIntrinsicCalibration.size() == 5) {
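
Note: a standalone sketch of the buffer-size selection added to processInputFrame() above, assuming camera3::Size and the member values introduced by this change; the free function is illustrative only:

    #include <algorithm>
    #include <cstdint>
    #include <sys/types.h>

    // Illustrative sketch of the maxDepthJpegBufferSize selection above.
    static size_t pickMaxDepthJpegBufferSize(
            ssize_t maxJpegBufferSize, size_t uhrMaxJpegBufferSize,
            camera3::Size defaultMaxJpegSize, camera3::Size uhrMaxJpegSize,
            uint32_t inputWidth, uint32_t inputHeight,
            size_t jpegSize, uint32_t depthWidth, uint32_t depthHeight) {
        if (maxJpegBufferSize > 0) {
            // Ultra high resolution sensor and the input frame exceeds the
            // default-mode max jpeg area: use the scaled UHR buffer size.
            if (uhrMaxJpegSize.width != 0 &&
                    inputWidth * inputHeight >
                    defaultMaxJpegSize.width * defaultMaxJpegSize.height) {
                return uhrMaxJpegBufferSize;
            }
            return maxJpegBufferSize;
        }
        // ANDROID_JPEG_MAX_SIZE absent: fall back to the larger of the main jpeg
        // size and an NV21-sized depth buffer.
        return std::max<size_t>(jpegSize, depthWidth * depthHeight * 3 / 2);
    }

The final output buffer is then sized at three times this value, since the depth photo carries the main jpeg, the depth map jpeg and the confidence map jpeg.
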
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index a520bbf..c1c75c1 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -132,7 +132,12 @@
sp<Surface> mDepthSurface, mBlobSurface, mOutputSurface;
sp<ProducerListener> mProducerListener;
- ssize_t mMaxJpegSize;
+ ssize_t mMaxJpegBufferSize;
+ ssize_t mUHRMaxJpegBufferSize;
+
+ camera3::Size mDefaultMaxJpegSize;
+ camera3::Size mUHRMaxJpegSize;
+
std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizes;
std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizesMaximumResolution;
std::vector<float> mIntrinsicCalibration, mLensDistortion;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index aefc75e..0101c58 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -499,42 +499,6 @@
return gotLock;
}
-camera3::Size Camera3Device::getMaxJpegResolution() const {
- int32_t maxJpegWidth = 0, maxJpegHeight = 0;
- const int STREAM_CONFIGURATION_SIZE = 4;
- const int STREAM_FORMAT_OFFSET = 0;
- const int STREAM_WIDTH_OFFSET = 1;
- const int STREAM_HEIGHT_OFFSET = 2;
- const int STREAM_IS_INPUT_OFFSET = 3;
- bool isHighResolutionSensor =
- camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mDeviceInfo);
- int32_t scalerSizesTag = isHighResolutionSensor ?
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
- camera_metadata_ro_entry_t availableStreamConfigs =
- mDeviceInfo.find(scalerSizesTag);
- if (availableStreamConfigs.count == 0 ||
- availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
- return camera3::Size(0, 0);
- }
-
- // Get max jpeg size (area-wise).
- for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
- int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
- int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
- int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
- int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
- if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
- && format == HAL_PIXEL_FORMAT_BLOB &&
- (width * height > maxJpegWidth * maxJpegHeight)) {
- maxJpegWidth = width;
- maxJpegHeight = height;
- }
- }
-
- return camera3::Size(maxJpegWidth, maxJpegHeight);
-}
-
nsecs_t Camera3Device::getMonoToBoottimeOffset() {
// try three times to get the clock offset, choose the one
// with the minimum gap in measurements.
@@ -625,13 +589,26 @@
}
ssize_t Camera3Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
- // Get max jpeg size (area-wise).
- camera3::Size maxJpegResolution = getMaxJpegResolution();
- if (maxJpegResolution.width == 0) {
+ // Get max jpeg size (area-wise) for default sensor pixel mode
+ camera3::Size maxDefaultJpegResolution =
+ SessionConfigurationUtils::getMaxJpegResolution(mDeviceInfo,
+ /*ultraHighResolution*/false);
+ // Get max jpeg size (area-wise) for the max resolution sensor pixel mode,
+ // or Size(0, 0) if this is not an ultra high resolution sensor
+ camera3::Size uhrMaxJpegResolution =
+ SessionConfigurationUtils::getMaxJpegResolution(mDeviceInfo,
+ /*ultraHighResolution*/true);
+ if (maxDefaultJpegResolution.width == 0) {
ALOGE("%s: Camera %s: Can't find valid available jpeg sizes in static metadata!",
__FUNCTION__, mId.string());
return BAD_VALUE;
}
+ bool useMaxSensorPixelModeThreshold = false;
+ if (uhrMaxJpegResolution.width != 0 &&
+ width * height > maxDefaultJpegResolution.width * maxDefaultJpegResolution.height) {
+ // Use the ultra high res max jpeg size and max jpeg buffer size
+ useMaxSensorPixelModeThreshold = true;
+ }
// Get max jpeg buffer size
ssize_t maxJpegBufferSize = 0;
@@ -642,11 +619,19 @@
return BAD_VALUE;
}
maxJpegBufferSize = jpegBufMaxSize.data.i32[0];
+
+ camera3::Size chosenMaxJpegResolution = maxDefaultJpegResolution;
+ if (useMaxSensorPixelModeThreshold) {
+ maxJpegBufferSize =
+ SessionConfigurationUtils::getUHRMaxJpegBufferSize(uhrMaxJpegResolution,
+ maxDefaultJpegResolution, maxJpegBufferSize);
+ chosenMaxJpegResolution = uhrMaxJpegResolution;
+ }
assert(kMinJpegBufferSize < maxJpegBufferSize);
// Calculate final jpeg buffer size for the given resolution.
float scaleFactor = ((float) (width * height)) /
- (maxJpegResolution.width * maxJpegResolution.height);
+ (chosenMaxJpegResolution.width * chosenMaxJpegResolution.height);
ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) +
kMinJpegBufferSize;
if (jpegBufferSize > maxJpegBufferSize) {
@@ -654,7 +639,6 @@
__FUNCTION__, maxJpegBufferSize);
jpegBufferSize = maxJpegBufferSize;
}
-
return jpegBufferSize;
}
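
Note: a worked example of the interpolation in getJpegBufferSize() above, using hypothetical numbers (a 12 MB max jpeg buffer at a 4000x3000 chosen max resolution, a 256 KB floor for kMinJpegBufferSize, and a 2000x1500 request):

    #include <cstdint>
    #include <sys/types.h>

    // Hypothetical values, for illustration only.
    static ssize_t exampleJpegBufferSize() {
        const ssize_t kMinJpegBufferSize = 256 * 1024;        // floor (hypothetical)
        const ssize_t maxJpegBufferSize  = 12 * 1024 * 1024;  // ANDROID_JPEG_MAX_SIZE (hypothetical)
        const int32_t maxW = 4000, maxH = 3000;               // chosen max jpeg resolution
        const uint32_t width = 2000, height = 1500;           // requested stream size

        float scaleFactor = ((float) (width * height)) / (maxW * maxH);  // 0.25
        ssize_t jpegBufferSize =
                scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) + kMinJpegBufferSize;
        return jpegBufferSize;  // ~3.2 MB; values above maxJpegBufferSize are clamped above
    }

For a request larger than the default-mode max area on an ultra high resolution sensor, the same formula is applied against uhrMaxJpegResolution and the scaled UHR buffer size instead.
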
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index aeae042..53a696f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -776,12 +776,6 @@
bool tryLockSpinRightRound(Mutex& lock);
/**
- * Helper function to get the largest Jpeg resolution (in area)
- * Return Size(0, 0) if static metatdata is invalid
- */
- camera3::Size getMaxJpegResolution() const;
-
- /**
* Helper function to get the offset between MONOTONIC and BOOTTIME
* timestamp.
*/
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 225dee9..ab861ad 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -1092,7 +1092,7 @@
time_t now = time(0);
tm *localTime = localtime(&now);
snprintf(imageFileName, sizeof(imageFileName), "IMG_%4d%02d%02d_%02d%02d%02d_%" PRId64 ".%s",
- 1900 + localTime->tm_year, localTime->tm_mon, localTime->tm_mday,
+ 1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday,
localTime->tm_hour, localTime->tm_min, localTime->tm_sec,
timestamp, fileExtension.c_str());
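
Note: the one-character fix above accounts for the struct tm convention that tm_mon runs 0-11 while tm_mday runs 1-31 and tm_year counts years since 1900; a quick self-contained check:

    #include <cstdio>
    #include <ctime>

    int main() {
        time_t now = time(nullptr);
        tm *localTime = localtime(&now);
        char buf[32];
        // tm_mon is 0-based, hence the + 1 in the dump filename above.
        snprintf(buf, sizeof(buf), "%04d%02d%02d",
                1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday);
        printf("%s\n", buf);  // e.g. 20210715 in July, not 20210615
        return 0;
    }
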
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index ed6ee9b..454c05f 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -36,6 +36,48 @@
bool SessionConfigurationUtils::IS_PERF_CLASS = (PERF_CLASS_LEVEL == SDK_VERSION_S);
+camera3::Size SessionConfigurationUtils::getMaxJpegResolution(const CameraMetadata &metadata,
+ bool ultraHighResolution) {
+ int32_t maxJpegWidth = 0, maxJpegHeight = 0;
+ const int STREAM_CONFIGURATION_SIZE = 4;
+ const int STREAM_FORMAT_OFFSET = 0;
+ const int STREAM_WIDTH_OFFSET = 1;
+ const int STREAM_HEIGHT_OFFSET = 2;
+ const int STREAM_IS_INPUT_OFFSET = 3;
+
+ int32_t scalerSizesTag = ultraHighResolution ?
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
+ camera_metadata_ro_entry_t availableStreamConfigs =
+ metadata.find(scalerSizesTag);
+ if (availableStreamConfigs.count == 0 ||
+ availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
+ return camera3::Size(0, 0);
+ }
+
+ // Get max jpeg size (area-wise).
+ for (size_t i = 0; i < availableStreamConfigs.count; i += STREAM_CONFIGURATION_SIZE) {
+ int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
+ int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
+ int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
+ int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
+ if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
+ && format == HAL_PIXEL_FORMAT_BLOB &&
+ (width * height > maxJpegWidth * maxJpegHeight)) {
+ maxJpegWidth = width;
+ maxJpegHeight = height;
+ }
+ }
+
+ return camera3::Size(maxJpegWidth, maxJpegHeight);
+}
+
+size_t SessionConfigurationUtils::getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
+ camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
+ return (uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
+ (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
+}
+
void StreamConfiguration::getStreamConfigurations(
const CameraMetadata &staticInfo, int configuration,
std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
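
Note: a worked example of getUHRMaxJpegBufferSize() above, using hypothetical sizes (an 8000x6000 max-resolution jpeg, a 4000x3000 default-mode jpeg, and a 10 MB default max jpeg buffer); the snippet assumes it lives next to the helpers in SessionConfigurationUtils.cpp. As written above, the area ratio is computed with integer division before the multiplication, so non-integral ratios are truncated.

    // (8000 * 6000) / (4000 * 3000) * 10 MB = 4 * 10 MB = 40 MB
    size_t exampleUhrMaxJpegBufferSize() {
        camera3::Size uhr(8000, 6000), def(4000, 3000);
        return SessionConfigurationUtils::getUHRMaxJpegBufferSize(
                uhr, def, /*defaultMaxJpegBufferSize*/ 10 * 1024 * 1024);
    }
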
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index b4814b6..1fbaa69 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -71,6 +71,12 @@
class SessionConfigurationUtils {
public:
+ static camera3::Size getMaxJpegResolution(const CameraMetadata &metadata,
+ bool ultraHighResolution);
+
+ static size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
+ camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize);
+
static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
// Find the closest dimensions for a given format in available stream configurations with