Merge changes from topic "InputMapper-refactor-256009910" into udc-dev
* changes:
InputMapper refactor: CursorInputMapper
InputMapper refactor: Modify InputMapper constructor for configuration
diff --git a/include/input/Input.h b/include/input/Input.h
index a033535..1e810b4 100644
--- a/include/input/Input.h
+++ b/include/input/Input.h
@@ -210,7 +210,20 @@
*/
float transformAngle(const ui::Transform& transform, float angleRadians);
-const char* inputEventTypeToString(int32_t type);
+/**
+ * The type of the InputEvent.
+ * This should have 1:1 correspondence with the values of anonymous enum defined in input.h.
+ */
+enum class InputEventType {
+ KEY = AINPUT_EVENT_TYPE_KEY,
+ MOTION = AINPUT_EVENT_TYPE_MOTION,
+ FOCUS = AINPUT_EVENT_TYPE_FOCUS,
+ CAPTURE = AINPUT_EVENT_TYPE_CAPTURE,
+ DRAG = AINPUT_EVENT_TYPE_DRAG,
+ TOUCH_MODE = AINPUT_EVENT_TYPE_TOUCH_MODE,
+ ftl_first = KEY,
+ ftl_last = TOUCH_MODE,
+};
std::string inputEventSourceToString(int32_t source);
@@ -482,7 +495,7 @@
public:
virtual ~InputEvent() { }
- virtual int32_t getType() const = 0;
+ virtual InputEventType getType() const = 0;
inline int32_t getId() const { return mId; }
@@ -513,6 +526,8 @@
std::array<uint8_t, 32> mHmac;
};
+std::ostream& operator<<(std::ostream& out, const InputEvent& event);
+
/*
* Key events.
*/
@@ -520,7 +535,7 @@
public:
virtual ~KeyEvent() { }
- virtual int32_t getType() const { return AINPUT_EVENT_TYPE_KEY; }
+ virtual InputEventType getType() const { return InputEventType::KEY; }
inline int32_t getAction() const { return mAction; }
@@ -571,7 +586,7 @@
public:
virtual ~MotionEvent() { }
- virtual int32_t getType() const { return AINPUT_EVENT_TYPE_MOTION; }
+ virtual InputEventType getType() const { return InputEventType::MOTION; }
inline int32_t getAction() const { return mAction; }
@@ -899,7 +914,7 @@
public:
virtual ~FocusEvent() {}
- virtual int32_t getType() const override { return AINPUT_EVENT_TYPE_FOCUS; }
+ virtual InputEventType getType() const override { return InputEventType::FOCUS; }
inline bool getHasFocus() const { return mHasFocus; }
@@ -918,7 +933,7 @@
public:
virtual ~CaptureEvent() {}
- virtual int32_t getType() const override { return AINPUT_EVENT_TYPE_CAPTURE; }
+ virtual InputEventType getType() const override { return InputEventType::CAPTURE; }
inline bool getPointerCaptureEnabled() const { return mPointerCaptureEnabled; }
@@ -937,7 +952,7 @@
public:
virtual ~DragEvent() {}
- virtual int32_t getType() const override { return AINPUT_EVENT_TYPE_DRAG; }
+ virtual InputEventType getType() const override { return InputEventType::DRAG; }
inline bool isExiting() const { return mIsExiting; }
@@ -961,7 +976,7 @@
public:
virtual ~TouchModeEvent() {}
- virtual int32_t getType() const override { return AINPUT_EVENT_TYPE_TOUCH_MODE; }
+ virtual InputEventType getType() const override { return InputEventType::TOUCH_MODE; }
inline bool isInTouchMode() const { return mIsInTouchMode; }
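The new `InputEventType` enum replaces the raw `AINPUT_EVENT_TYPE_*` integers returned by `getType()`, and the `ftl_first`/`ftl_last` sentinels follow the convention that frameworks/native's ftl enum helpers key off. Below is a minimal caller-side sketch, not part of the patch; `ftl::enum_string` from `<ftl/enum.h>` is assumed to be available, and `LOG_TAG` is defined only for the example.

```cpp
#define LOG_TAG "InputEventTypeExample"

#include <ftl/enum.h>   // assumed location of ftl::enum_string
#include <input/Input.h>
#include <log/log.h>

void handleEvent(const android::InputEvent& event) {
    using android::InputEventType;
    switch (event.getType()) {  // now a typed enum rather than an int32_t
        case InputEventType::KEY:
        case InputEventType::MOTION:
            // Pointer/key traffic would be dispatched here.
            break;
        case InputEventType::FOCUS:
        case InputEventType::CAPTURE:
        case InputEventType::DRAG:
        case InputEventType::TOUCH_MODE:
            // ftl_first/ftl_last make the enum enumerable, so it can be stringified for logs.
            ALOGD("ignoring %s event", ftl::enum_string(event.getType()).c_str());
            break;
    }
}
```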
diff --git a/libs/binder/IBatteryStats.cpp b/libs/binder/IBatteryStats.cpp
index 0de804c..69b11c0 100644
--- a/libs/binder/IBatteryStats.cpp
+++ b/libs/binder/IBatteryStats.cpp
@@ -128,6 +128,15 @@
remote()->transact(NOTE_RESET_FLASHLIGHT_TRANSACTION, data, &reply);
}
+ virtual binder::Status noteWakeupSensorEvent(int64_t elapsedNanos, int uid, int handle) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IBatteryStats::getInterfaceDescriptor());
+ data.writeInt64(elapsedNanos);
+ data.writeInt32(uid);
+ data.writeInt32(handle);
+ status_t ret = remote()->transact(NOTE_WAKEUP_SENSOR_EVENT_TRANSACTION, data, &reply);
+ return binder::Status::fromStatusT(ret);
+ }
};
IMPLEMENT_META_INTERFACE(BatteryStats, "com.android.internal.app.IBatteryStats")
@@ -235,6 +244,16 @@
reply->writeNoException();
return NO_ERROR;
} break;
+ case NOTE_WAKEUP_SENSOR_EVENT_TRANSACTION: {
+ CHECK_INTERFACE(IBatteryStats, data, reply);
+ int64_t elapsedNanos = data.readInt64();
+ int uid = data.readInt32();
+ int handle = data.readInt32();
+ noteWakeupSensorEvent(elapsedNanos, uid, handle);
+ reply->writeNoException();
+ return NO_ERROR;
+ } break;
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/libs/binder/include_batterystats/batterystats/IBatteryStats.h b/libs/binder/include_batterystats/batterystats/IBatteryStats.h
index 6defc7f..5bb01dd 100644
--- a/libs/binder/include_batterystats/batterystats/IBatteryStats.h
+++ b/libs/binder/include_batterystats/batterystats/IBatteryStats.h
@@ -19,6 +19,7 @@
#ifndef __ANDROID_VNDK__
#include <binder/IInterface.h>
+#include <binder/Status.h>
namespace android {
@@ -43,6 +44,7 @@
virtual void noteStopCamera(int uid) = 0;
virtual void noteResetCamera() = 0;
virtual void noteResetFlashlight() = 0;
+ virtual binder::Status noteWakeupSensorEvent(int64_t elapsedNanos, int uid, int sensor) = 0;
enum {
NOTE_START_SENSOR_TRANSACTION = IBinder::FIRST_CALL_TRANSACTION,
@@ -58,7 +60,8 @@
NOTE_START_CAMERA_TRANSACTION,
NOTE_STOP_CAMERA_TRANSACTION,
NOTE_RESET_CAMERA_TRANSACTION,
- NOTE_RESET_FLASHLIGHT_TRANSACTION
+ NOTE_RESET_FLASHLIGHT_TRANSACTION,
+ NOTE_WAKEUP_SENSOR_EVENT_TRANSACTION
};
};
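For context, the new `noteWakeupSensorEvent()` entry point would be reached from native code roughly as follows. This is a hedged sketch, not part of the patch: the "batterystats" service name and the use of `elapsedRealtimeNano()` are assumptions for illustration.

```cpp
#include <batterystats/IBatteryStats.h>
#include <binder/IServiceManager.h>
#include <utils/SystemClock.h>

// Hypothetical caller: reports a wakeup sensor event to batterystats (sketch only).
void reportWakeupSensorEvent(int uid, int sensorHandle) {
    using namespace android;
    sp<IBatteryStats> stats = interface_cast<IBatteryStats>(
            defaultServiceManager()->checkService(String16("batterystats")));  // assumed name
    if (stats == nullptr) return;

    // elapsedRealtimeNano() matches the int64_t elapsedNanos parameter added above.
    binder::Status status = stats->noteWakeupSensorEvent(elapsedRealtimeNano(), uid, sensorHandle);
    if (!status.isOk()) {
        // transact() failures now surface through binder::Status (see the proxy change above).
    }
}
```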
diff --git a/libs/binder/tests/binderRpcTest.cpp b/libs/binder/tests/binderRpcTest.cpp
index 504b3ce..8d13007 100644
--- a/libs/binder/tests/binderRpcTest.cpp
+++ b/libs/binder/tests/binderRpcTest.cpp
@@ -1126,6 +1126,11 @@
android::base::unique_fd serverFd(
TEMP_FAILURE_RETRY(socket(AF_VSOCK, SOCK_STREAM | SOCK_CLOEXEC | SOCK_NONBLOCK, 0)));
+
+ if (errno == EAFNOSUPPORT) {
+ return false;
+ }
+
LOG_ALWAYS_FATAL_IF(serverFd == -1, "Could not create socket: %s", strerror(errno));
sockaddr_vm serverAddr{
diff --git a/libs/bufferqueueconverter/Android.bp b/libs/bufferqueueconverter/Android.bp
index 5f145a1..d4605ea 100644
--- a/libs/bufferqueueconverter/Android.bp
+++ b/libs/bufferqueueconverter/Android.bp
@@ -13,7 +13,7 @@
export_include_dirs: ["include"],
}
-cc_library_shared {
+cc_library {
name: "libbufferqueueconverter",
vendor_available: true,
vndk: {
diff --git a/libs/gui/BLASTBufferQueue.cpp b/libs/gui/BLASTBufferQueue.cpp
index 821dd37..5c324b2 100644
--- a/libs/gui/BLASTBufferQueue.cpp
+++ b/libs/gui/BLASTBufferQueue.cpp
@@ -582,7 +582,8 @@
// Only update mSize for destination bounds if the incoming buffer matches the requested size.
// Otherwise, it could cause stretching since the destination bounds will update before the
// buffer with the new size is acquired.
- if (mRequestedSize == getBufferSize(bufferItem)) {
+ if (mRequestedSize == getBufferSize(bufferItem) ||
+ bufferItem.mScalingMode != NATIVE_WINDOW_SCALING_MODE_FREEZE) {
mSize = mRequestedSize;
}
Rect crop = computeCrop(bufferItem);
@@ -800,34 +801,24 @@
mDequeueTimestamps.erase(bufferId);
};
-void BLASTBufferQueue::syncNextTransaction(
+bool BLASTBufferQueue::syncNextTransaction(
std::function<void(SurfaceComposerClient::Transaction*)> callback,
bool acquireSingleBuffer) {
- std::function<void(SurfaceComposerClient::Transaction*)> prevCallback = nullptr;
- SurfaceComposerClient::Transaction* prevTransaction = nullptr;
+ LOG_ALWAYS_FATAL_IF(!callback,
+ "BLASTBufferQueue: callback passed in to syncNextTransaction must not be "
+ "NULL");
- {
- std::lock_guard _lock{mMutex};
- BBQ_TRACE();
- // We're about to overwrite the previous call so we should invoke that callback
- // immediately.
- if (mTransactionReadyCallback) {
- prevCallback = mTransactionReadyCallback;
- prevTransaction = mSyncTransaction;
- }
-
- mTransactionReadyCallback = callback;
- if (callback) {
- mSyncTransaction = new SurfaceComposerClient::Transaction();
- } else {
- mSyncTransaction = nullptr;
- }
- mAcquireSingleBuffer = mTransactionReadyCallback ? acquireSingleBuffer : true;
+ std::lock_guard _lock{mMutex};
+ BBQ_TRACE();
+ if (mTransactionReadyCallback) {
+ ALOGW("Attempting to overwrite transaction callback in syncNextTransaction");
+ return false;
}
- if (prevCallback) {
- prevCallback(prevTransaction);
- }
+ mTransactionReadyCallback = callback;
+ mSyncTransaction = new SurfaceComposerClient::Transaction();
+ mAcquireSingleBuffer = acquireSingleBuffer;
+ return true;
}
void BLASTBufferQueue::stopContinuousSyncTransaction() {
@@ -835,20 +826,35 @@
SurfaceComposerClient::Transaction* prevTransaction = nullptr;
{
std::lock_guard _lock{mMutex};
- bool invokeCallback = mTransactionReadyCallback && !mAcquireSingleBuffer;
- if (invokeCallback) {
- prevCallback = mTransactionReadyCallback;
- prevTransaction = mSyncTransaction;
+ if (mAcquireSingleBuffer || !mTransactionReadyCallback) {
+ ALOGW("Attempting to stop continuous sync when none are active");
+ return;
}
+
+ prevCallback = mTransactionReadyCallback;
+ prevTransaction = mSyncTransaction;
+
mTransactionReadyCallback = nullptr;
mSyncTransaction = nullptr;
mAcquireSingleBuffer = true;
}
+
if (prevCallback) {
prevCallback(prevTransaction);
}
}
+void BLASTBufferQueue::clearSyncTransaction() {
+ std::lock_guard _lock{mMutex};
+ if (!mAcquireSingleBuffer) {
+ ALOGW("Attempting to clear sync transaction when none are active");
+ return;
+ }
+
+ mTransactionReadyCallback = nullptr;
+ mSyncTransaction = nullptr;
+}
+
bool BLASTBufferQueue::rejectBuffer(const BufferItem& item) {
if (item.mScalingMode != NATIVE_WINDOW_SCALING_MODE_FREEZE) {
// Only reject buffers if scaling mode is freeze.
diff --git a/libs/gui/include/gui/BLASTBufferQueue.h b/libs/gui/include/gui/BLASTBufferQueue.h
index 69e9f8a..a49a859 100644
--- a/libs/gui/include/gui/BLASTBufferQueue.h
+++ b/libs/gui/include/gui/BLASTBufferQueue.h
@@ -97,9 +97,10 @@
void releaseBufferCallbackLocked(const ReleaseCallbackId& id, const sp<Fence>& releaseFence,
std::optional<uint32_t> currentMaxAcquiredBufferCount,
bool fakeRelease) REQUIRES(mMutex);
- void syncNextTransaction(std::function<void(SurfaceComposerClient::Transaction*)> callback,
+ bool syncNextTransaction(std::function<void(SurfaceComposerClient::Transaction*)> callback,
bool acquireSingleBuffer = true);
void stopContinuousSyncTransaction();
+ void clearSyncTransaction();
void mergeWithNextTransaction(SurfaceComposerClient::Transaction* t, uint64_t frameNumber);
void applyPendingTransactions(uint64_t frameNumber);
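The header change reflects the new contract: `syncNextTransaction()` no longer accepts a null callback or silently replaces a pending one (it returns false instead), and dropping a pending sync is now an explicit `clearSyncTransaction()` call. A minimal caller-side sketch, not part of the patch; the setup around the `BLASTBufferQueue` instance is assumed.

```cpp
#include <gui/BLASTBufferQueue.h>
#include <gui/SurfaceComposerClient.h>

using android::BLASTBufferQueue;
using android::SurfaceComposerClient;
using android::sp;

void requestSyncedBuffer(const sp<BLASTBufferQueue>& bbq) {
    auto onTransactionReady = [](SurfaceComposerClient::Transaction*) {
        // Takes ownership of the sync transaction carrying the next acquired buffer.
    };

    if (!bbq->syncNextTransaction(onTransactionReady)) {
        // A callback is already pending; it is no longer silently overwritten.
        return;
    }

    // If the caller later abandons the sync before a buffer is queued, it must drop it
    // explicitly; passing nullptr to syncNextTransaction() now aborts.
    bbq->clearSyncTransaction();
}
```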
diff --git a/libs/gui/tests/BLASTBufferQueue_test.cpp b/libs/gui/tests/BLASTBufferQueue_test.cpp
index cf2593d..7067c11 100644
--- a/libs/gui/tests/BLASTBufferQueue_test.cpp
+++ b/libs/gui/tests/BLASTBufferQueue_test.cpp
@@ -32,6 +32,7 @@
#include <private/gui/ComposerService.h>
#include <private/gui/ComposerServiceAIDL.h>
#include <ui/DisplayMode.h>
+#include <ui/DisplayState.h>
#include <ui/GraphicBuffer.h>
#include <ui/GraphicTypes.h>
#include <ui/Transform.h>
@@ -116,15 +117,17 @@
mBlastBufferQueueAdapter->syncNextTransaction(callback, acquireSingleBuffer);
}
- void syncNextTransaction(std::function<void(Transaction*)> callback,
+ bool syncNextTransaction(std::function<void(Transaction*)> callback,
bool acquireSingleBuffer = true) {
- mBlastBufferQueueAdapter->syncNextTransaction(callback, acquireSingleBuffer);
+ return mBlastBufferQueueAdapter->syncNextTransaction(callback, acquireSingleBuffer);
}
void stopContinuousSyncTransaction() {
mBlastBufferQueueAdapter->stopContinuousSyncTransaction();
}
+ void clearSyncTransaction() { mBlastBufferQueueAdapter->clearSyncTransaction(); }
+
int getWidth() { return mBlastBufferQueueAdapter->mSize.width; }
int getHeight() { return mBlastBufferQueueAdapter->mSize.height; }
@@ -198,11 +201,13 @@
t.apply();
t.clear();
- ui::DisplayMode mode;
- ASSERT_EQ(NO_ERROR, SurfaceComposerClient::getActiveDisplayMode(mDisplayToken, &mode));
- const ui::Size& resolution = mode.resolution;
+ ui::DisplayState displayState;
+ ASSERT_EQ(NO_ERROR, SurfaceComposerClient::getDisplayState(mDisplayToken, &displayState));
+ const ui::Size& resolution = displayState.layerStackSpaceRect;
mDisplayWidth = resolution.getWidth();
mDisplayHeight = resolution.getHeight();
+ ALOGV("Display: %dx%d orientation:%d", mDisplayWidth, mDisplayHeight,
+ displayState.orientation);
mSurfaceControl = mClient->createSurface(String8("TestSurface"), mDisplayWidth,
mDisplayHeight, PIXEL_FORMAT_RGBA_8888,
@@ -1108,7 +1113,11 @@
ASSERT_NE(nullptr, adapter.getTransactionReadyCallback());
auto callback2 = [](Transaction*) {};
- adapter.syncNextTransaction(callback2);
+ ASSERT_FALSE(adapter.syncNextTransaction(callback2));
+
+ sp<IGraphicBufferProducer> igbProducer;
+ setUpProducer(adapter, igbProducer);
+ queueBuffer(igbProducer, 0, 255, 0, 0);
std::unique_lock<std::mutex> lock(mutex);
if (!receivedCallback) {
@@ -1120,6 +1129,37 @@
ASSERT_TRUE(receivedCallback);
}
+TEST_F(BLASTBufferQueueTest, ClearSyncTransaction) {
+ std::mutex mutex;
+ std::condition_variable callbackReceivedCv;
+ bool receivedCallback = false;
+
+ BLASTBufferQueueHelper adapter(mSurfaceControl, mDisplayWidth, mDisplayHeight);
+ ASSERT_EQ(nullptr, adapter.getTransactionReadyCallback());
+ auto callback = [&](Transaction*) {
+ std::unique_lock<std::mutex> lock(mutex);
+ receivedCallback = true;
+ callbackReceivedCv.notify_one();
+ };
+ adapter.syncNextTransaction(callback);
+ ASSERT_NE(nullptr, adapter.getTransactionReadyCallback());
+
+ adapter.clearSyncTransaction();
+
+ sp<IGraphicBufferProducer> igbProducer;
+ setUpProducer(adapter, igbProducer);
+ queueBuffer(igbProducer, 0, 255, 0, 0);
+
+ std::unique_lock<std::mutex> lock(mutex);
+ if (!receivedCallback) {
+ ASSERT_EQ(callbackReceivedCv.wait_for(lock, std::chrono::seconds(3)),
+ std::cv_status::timeout)
+ << "did not receive callback";
+ }
+
+ ASSERT_FALSE(receivedCallback);
+}
+
TEST_F(BLASTBufferQueueTest, SyncNextTransactionDropBuffer) {
uint8_t r = 255;
uint8_t g = 0;
diff --git a/libs/gui/tests/EndToEndNativeInputTest.cpp b/libs/gui/tests/EndToEndNativeInputTest.cpp
index 9a9bd4a..4ec7a06 100644
--- a/libs/gui/tests/EndToEndNativeInputTest.cpp
+++ b/libs/gui/tests/EndToEndNativeInputTest.cpp
@@ -164,7 +164,7 @@
void assertFocusChange(bool hasFocus) {
InputEvent *ev = consumeEvent();
ASSERT_NE(ev, nullptr);
- ASSERT_EQ(AINPUT_EVENT_TYPE_FOCUS, ev->getType());
+ ASSERT_EQ(InputEventType::FOCUS, ev->getType());
FocusEvent *focusEvent = static_cast<FocusEvent *>(ev);
EXPECT_EQ(hasFocus, focusEvent->getHasFocus());
}
@@ -172,7 +172,7 @@
void expectTap(int x, int y) {
InputEvent* ev = consumeEvent();
ASSERT_NE(ev, nullptr);
- ASSERT_EQ(AINPUT_EVENT_TYPE_MOTION, ev->getType());
+ ASSERT_EQ(InputEventType::MOTION, ev->getType());
MotionEvent* mev = static_cast<MotionEvent*>(ev);
EXPECT_EQ(AMOTION_EVENT_ACTION_DOWN, mev->getAction());
EXPECT_EQ(x, mev->getX(0));
@@ -181,7 +181,7 @@
ev = consumeEvent();
ASSERT_NE(ev, nullptr);
- ASSERT_EQ(AINPUT_EVENT_TYPE_MOTION, ev->getType());
+ ASSERT_EQ(InputEventType::MOTION, ev->getType());
mev = static_cast<MotionEvent*>(ev);
EXPECT_EQ(AMOTION_EVENT_ACTION_UP, mev->getAction());
EXPECT_EQ(0, mev->getFlags() & VERIFIED_MOTION_EVENT_FLAGS);
@@ -190,7 +190,7 @@
void expectTapWithFlag(int x, int y, int32_t flags) {
InputEvent *ev = consumeEvent();
ASSERT_NE(ev, nullptr);
- ASSERT_EQ(AINPUT_EVENT_TYPE_MOTION, ev->getType());
+ ASSERT_EQ(InputEventType::MOTION, ev->getType());
MotionEvent *mev = static_cast<MotionEvent *>(ev);
EXPECT_EQ(AMOTION_EVENT_ACTION_DOWN, mev->getAction());
EXPECT_EQ(x, mev->getX(0));
@@ -199,7 +199,7 @@
ev = consumeEvent();
ASSERT_NE(ev, nullptr);
- ASSERT_EQ(AINPUT_EVENT_TYPE_MOTION, ev->getType());
+ ASSERT_EQ(InputEventType::MOTION, ev->getType());
mev = static_cast<MotionEvent *>(ev);
EXPECT_EQ(AMOTION_EVENT_ACTION_UP, mev->getAction());
EXPECT_EQ(flags, mev->getFlags() & flags);
@@ -208,7 +208,7 @@
void expectTapInDisplayCoordinates(int displayX, int displayY) {
InputEvent *ev = consumeEvent();
ASSERT_NE(ev, nullptr);
- ASSERT_EQ(AINPUT_EVENT_TYPE_MOTION, ev->getType());
+ ASSERT_EQ(InputEventType::MOTION, ev->getType());
MotionEvent *mev = static_cast<MotionEvent *>(ev);
EXPECT_EQ(AMOTION_EVENT_ACTION_DOWN, mev->getAction());
const PointerCoords &coords = *mev->getRawPointerCoords(0 /*pointerIndex*/);
@@ -218,7 +218,7 @@
ev = consumeEvent();
ASSERT_NE(ev, nullptr);
- ASSERT_EQ(AINPUT_EVENT_TYPE_MOTION, ev->getType());
+ ASSERT_EQ(InputEventType::MOTION, ev->getType());
mev = static_cast<MotionEvent *>(ev);
EXPECT_EQ(AMOTION_EVENT_ACTION_UP, mev->getAction());
EXPECT_EQ(0, mev->getFlags() & VERIFIED_MOTION_EVENT_FLAGS);
@@ -227,7 +227,7 @@
void expectKey(uint32_t keycode) {
InputEvent *ev = consumeEvent();
ASSERT_NE(ev, nullptr);
- ASSERT_EQ(AINPUT_EVENT_TYPE_KEY, ev->getType());
+ ASSERT_EQ(InputEventType::KEY, ev->getType());
KeyEvent *keyEvent = static_cast<KeyEvent *>(ev);
EXPECT_EQ(AMOTION_EVENT_ACTION_DOWN, keyEvent->getAction());
EXPECT_EQ(keycode, keyEvent->getKeyCode());
@@ -235,7 +235,7 @@
ev = consumeEvent();
ASSERT_NE(ev, nullptr);
- ASSERT_EQ(AINPUT_EVENT_TYPE_KEY, ev->getType());
+ ASSERT_EQ(InputEventType::KEY, ev->getType());
keyEvent = static_cast<KeyEvent *>(ev);
EXPECT_EQ(AMOTION_EVENT_ACTION_UP, keyEvent->getAction());
EXPECT_EQ(keycode, keyEvent->getKeyCode());
diff --git a/libs/input/Input.cpp b/libs/input/Input.cpp
index 4dbf575..00925ba 100644
--- a/libs/input/Input.cpp
+++ b/libs/input/Input.cpp
@@ -170,30 +170,6 @@
return atan2f(transformedPoint.x, -transformedPoint.y);
}
-const char* inputEventTypeToString(int32_t type) {
- switch (type) {
- case AINPUT_EVENT_TYPE_KEY: {
- return "KEY";
- }
- case AINPUT_EVENT_TYPE_MOTION: {
- return "MOTION";
- }
- case AINPUT_EVENT_TYPE_FOCUS: {
- return "FOCUS";
- }
- case AINPUT_EVENT_TYPE_CAPTURE: {
- return "CAPTURE";
- }
- case AINPUT_EVENT_TYPE_DRAG: {
- return "DRAG";
- }
- case AINPUT_EVENT_TYPE_TOUCH_MODE: {
- return "TOUCH_MODE";
- }
- }
- return "UNKNOWN";
-}
-
std::string inputEventSourceToString(int32_t source) {
if (source == AINPUT_SOURCE_UNKNOWN) {
return "UNKNOWN";
@@ -287,6 +263,37 @@
return idGen.nextId();
}
+std::ostream& operator<<(std::ostream& out, const InputEvent& event) {
+ switch (event.getType()) {
+ case InputEventType::KEY: {
+ const KeyEvent& keyEvent = static_cast<const KeyEvent&>(event);
+ out << keyEvent;
+ return out;
+ }
+ case InputEventType::MOTION: {
+ const MotionEvent& motionEvent = static_cast<const MotionEvent&>(event);
+ out << motionEvent;
+ return out;
+ }
+ case InputEventType::FOCUS: {
+ out << "FocusEvent";
+ return out;
+ }
+ case InputEventType::CAPTURE: {
+ out << "CaptureEvent";
+ return out;
+ }
+ case InputEventType::DRAG: {
+ out << "DragEvent";
+ return out;
+ }
+ case InputEventType::TOUCH_MODE: {
+ out << "TouchModeEvent";
+ return out;
+ }
+ }
+}
+
// --- KeyEvent ---
const char* KeyEvent::getLabel(int32_t keyCode) {
@@ -1165,44 +1172,51 @@
void PooledInputEventFactory::recycle(InputEvent* event) {
switch (event->getType()) {
- case AINPUT_EVENT_TYPE_KEY:
- if (mKeyEventPool.size() < mMaxPoolSize) {
- mKeyEventPool.push(std::unique_ptr<KeyEvent>(static_cast<KeyEvent*>(event)));
- return;
+ case InputEventType::KEY: {
+ if (mKeyEventPool.size() < mMaxPoolSize) {
+ mKeyEventPool.push(std::unique_ptr<KeyEvent>(static_cast<KeyEvent*>(event)));
+ return;
+ }
+ break;
}
- break;
- case AINPUT_EVENT_TYPE_MOTION:
- if (mMotionEventPool.size() < mMaxPoolSize) {
- mMotionEventPool.push(std::unique_ptr<MotionEvent>(static_cast<MotionEvent*>(event)));
- return;
+ case InputEventType::MOTION: {
+ if (mMotionEventPool.size() < mMaxPoolSize) {
+ mMotionEventPool.push(
+ std::unique_ptr<MotionEvent>(static_cast<MotionEvent*>(event)));
+ return;
+ }
+ break;
}
- break;
- case AINPUT_EVENT_TYPE_FOCUS:
- if (mFocusEventPool.size() < mMaxPoolSize) {
- mFocusEventPool.push(std::unique_ptr<FocusEvent>(static_cast<FocusEvent*>(event)));
- return;
+ case InputEventType::FOCUS: {
+ if (mFocusEventPool.size() < mMaxPoolSize) {
+ mFocusEventPool.push(std::unique_ptr<FocusEvent>(static_cast<FocusEvent*>(event)));
+ return;
+ }
+ break;
}
- break;
- case AINPUT_EVENT_TYPE_CAPTURE:
- if (mCaptureEventPool.size() < mMaxPoolSize) {
- mCaptureEventPool.push(
- std::unique_ptr<CaptureEvent>(static_cast<CaptureEvent*>(event)));
- return;
+ case InputEventType::CAPTURE: {
+ if (mCaptureEventPool.size() < mMaxPoolSize) {
+ mCaptureEventPool.push(
+ std::unique_ptr<CaptureEvent>(static_cast<CaptureEvent*>(event)));
+ return;
+ }
+ break;
}
- break;
- case AINPUT_EVENT_TYPE_DRAG:
- if (mDragEventPool.size() < mMaxPoolSize) {
- mDragEventPool.push(std::unique_ptr<DragEvent>(static_cast<DragEvent*>(event)));
- return;
+ case InputEventType::DRAG: {
+ if (mDragEventPool.size() < mMaxPoolSize) {
+ mDragEventPool.push(std::unique_ptr<DragEvent>(static_cast<DragEvent*>(event)));
+ return;
+ }
+ break;
}
- break;
- case AINPUT_EVENT_TYPE_TOUCH_MODE:
- if (mTouchModeEventPool.size() < mMaxPoolSize) {
- mTouchModeEventPool.push(
- std::unique_ptr<TouchModeEvent>(static_cast<TouchModeEvent*>(event)));
- return;
+ case InputEventType::TOUCH_MODE: {
+ if (mTouchModeEventPool.size() < mMaxPoolSize) {
+ mTouchModeEventPool.push(
+ std::unique_ptr<TouchModeEvent>(static_cast<TouchModeEvent*>(event)));
+ return;
+ }
+ break;
}
- break;
}
delete event;
}
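The new `operator<<` gives a single polymorphic way to log any `InputEvent`; it dispatches on the `InputEventType` and falls back to the type name for events without a detailed printer. A small sketch, not part of the patch; `LOG_TAG` is defined only for the example.

```cpp
#define LOG_TAG "InputEventDump"

#include <sstream>

#include <input/Input.h>
#include <log/log.h>

void dumpEvent(const android::InputEvent& event) {
    std::ostringstream out;
    out << event;  // full dump for key/motion events, type name for the others
    ALOGD("%s", out.str().c_str());
}
```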
diff --git a/libs/input/android/os/InputEventInjectionSync.aidl b/libs/input/android/os/InputEventInjectionSync.aidl
index 95d24cb..2d225fa 100644
--- a/libs/input/android/os/InputEventInjectionSync.aidl
+++ b/libs/input/android/os/InputEventInjectionSync.aidl
@@ -33,4 +33,7 @@
/* Waits for the input event to be completely processed. */
WAIT_FOR_FINISHED = 2,
+
+ ftl_first = NONE,
+ ftl_last = WAIT_FOR_FINISHED,
}
diff --git a/libs/input/tests/InputEvent_test.cpp b/libs/input/tests/InputEvent_test.cpp
index 59125dd..a965573 100644
--- a/libs/input/tests/InputEvent_test.cpp
+++ b/libs/input/tests/InputEvent_test.cpp
@@ -197,7 +197,7 @@
ARBITRARY_DOWN_TIME, ARBITRARY_EVENT_TIME);
ASSERT_EQ(id, event.getId());
- ASSERT_EQ(AINPUT_EVENT_TYPE_KEY, event.getType());
+ ASSERT_EQ(InputEventType::KEY, event.getType());
ASSERT_EQ(2, event.getDeviceId());
ASSERT_EQ(AINPUT_SOURCE_GAMEPAD, event.getSource());
ASSERT_EQ(DISPLAY_ID, event.getDisplayId());
@@ -346,7 +346,7 @@
void MotionEventTest::assertEqualsEventWithHistory(const MotionEvent* event) {
// Check properties.
ASSERT_EQ(mId, event->getId());
- ASSERT_EQ(AINPUT_EVENT_TYPE_MOTION, event->getType());
+ ASSERT_EQ(InputEventType::MOTION, event->getType());
ASSERT_EQ(2, event->getDeviceId());
ASSERT_EQ(AINPUT_SOURCE_TOUCHSCREEN, event->getSource());
ASSERT_EQ(DISPLAY_ID, event->getDisplayId());
diff --git a/libs/input/tests/InputPublisherAndConsumer_test.cpp b/libs/input/tests/InputPublisherAndConsumer_test.cpp
index 965fda7..3ecf8ee 100644
--- a/libs/input/tests/InputPublisherAndConsumer_test.cpp
+++ b/libs/input/tests/InputPublisherAndConsumer_test.cpp
@@ -98,8 +98,7 @@
ASSERT_TRUE(event != nullptr)
<< "consumer should have returned non-NULL event";
- ASSERT_EQ(AINPUT_EVENT_TYPE_KEY, event->getType())
- << "consumer should have returned a key event";
+ ASSERT_EQ(InputEventType::KEY, event->getType()) << "consumer should have returned a key event";
KeyEvent* keyEvent = static_cast<KeyEvent*>(event);
EXPECT_EQ(seq, consumeSeq);
@@ -207,7 +206,7 @@
ASSERT_TRUE(event != nullptr)
<< "consumer should have returned non-NULL event";
- ASSERT_EQ(AINPUT_EVENT_TYPE_MOTION, event->getType())
+ ASSERT_EQ(InputEventType::MOTION, event->getType())
<< "consumer should have returned a motion event";
MotionEvent* motionEvent = static_cast<MotionEvent*>(event);
@@ -298,7 +297,7 @@
ASSERT_EQ(OK, status) << "consumer consume should return OK";
ASSERT_TRUE(event != nullptr) << "consumer should have returned non-NULL event";
- ASSERT_EQ(AINPUT_EVENT_TYPE_FOCUS, event->getType())
+ ASSERT_EQ(InputEventType::FOCUS, event->getType())
<< "consumer should have returned a focus event";
FocusEvent* focusEvent = static_cast<FocusEvent*>(event);
@@ -339,7 +338,7 @@
ASSERT_EQ(OK, status) << "consumer consume should return OK";
ASSERT_TRUE(event != nullptr) << "consumer should have returned non-NULL event";
- ASSERT_EQ(AINPUT_EVENT_TYPE_CAPTURE, event->getType())
+ ASSERT_EQ(InputEventType::CAPTURE, event->getType())
<< "consumer should have returned a capture event";
const CaptureEvent* captureEvent = static_cast<CaptureEvent*>(event);
@@ -381,7 +380,7 @@
ASSERT_EQ(OK, status) << "consumer consume should return OK";
ASSERT_TRUE(event != nullptr) << "consumer should have returned non-NULL event";
- ASSERT_EQ(AINPUT_EVENT_TYPE_DRAG, event->getType())
+ ASSERT_EQ(InputEventType::DRAG, event->getType())
<< "consumer should have returned a drag event";
const DragEvent& dragEvent = static_cast<const DragEvent&>(*event);
@@ -423,7 +422,7 @@
ASSERT_EQ(OK, status) << "consumer consume should return OK";
ASSERT_TRUE(event != nullptr) << "consumer should have returned non-NULL event";
- ASSERT_EQ(AINPUT_EVENT_TYPE_TOUCH_MODE, event->getType())
+ ASSERT_EQ(InputEventType::TOUCH_MODE, event->getType())
<< "consumer should have returned a touch mode event";
const TouchModeEvent& touchModeEvent = static_cast<const TouchModeEvent&>(*event);
diff --git a/libs/jpegrecoverymap/Android.bp b/libs/jpegrecoverymap/Android.bp
index a1b0e19..a376ced 100644
--- a/libs/jpegrecoverymap/Android.bp
+++ b/libs/jpegrecoverymap/Android.bp
@@ -31,7 +31,7 @@
srcs: [
"icc.cpp",
"jpegr.cpp",
- "recoverymapmath.cpp",
+ "gainmapmath.cpp",
"jpegrutils.cpp",
"multipictureformat.cpp",
],
diff --git a/libs/jpegrecoverymap/recoverymapmath.cpp b/libs/jpegrecoverymap/gainmapmath.cpp
similarity index 94%
rename from libs/jpegrecoverymap/recoverymapmath.cpp
rename to libs/jpegrecoverymap/gainmapmath.cpp
index ce6fc8f..f15a078 100644
--- a/libs/jpegrecoverymap/recoverymapmath.cpp
+++ b/libs/jpegrecoverymap/gainmapmath.cpp
@@ -16,7 +16,7 @@
#include <cmath>
#include <vector>
-#include <jpegrecoverymap/recoverymapmath.h>
+#include <jpegrecoverymap/gainmapmath.h>
namespace android::jpegrecoverymap {
@@ -441,14 +441,14 @@
////////////////////////////////////////////////////////////////////////////////
-// Recovery map calculations
-uint8_t encodeRecovery(float y_sdr, float y_hdr, jr_metadata_ptr metadata) {
- return encodeRecovery(y_sdr, y_hdr, metadata,
- log2(metadata->minContentBoost), log2(metadata->maxContentBoost));
+// Gain map calculations
+uint8_t encodeGain(float y_sdr, float y_hdr, jr_metadata_ptr metadata) {
+ return encodeGain(y_sdr, y_hdr, metadata,
+ log2(metadata->minContentBoost), log2(metadata->maxContentBoost));
}
-uint8_t encodeRecovery(float y_sdr, float y_hdr, jr_metadata_ptr metadata,
- float log2MinContentBoost, float log2MaxContentBoost) {
+uint8_t encodeGain(float y_sdr, float y_hdr, jr_metadata_ptr metadata,
+ float log2MinContentBoost, float log2MaxContentBoost) {
float gain = 1.0f;
if (y_sdr > 0.0f) {
gain = y_hdr / y_sdr;
@@ -462,23 +462,23 @@
* 255.0f);
}
-Color applyRecovery(Color e, float recovery, jr_metadata_ptr metadata) {
- float logBoost = log2(metadata->minContentBoost) * (1.0f - recovery)
- + log2(metadata->maxContentBoost) * recovery;
- float recoveryFactor = exp2(logBoost);
- return e * recoveryFactor;
+Color applyGain(Color e, float gain, jr_metadata_ptr metadata) {
+ float logBoost = log2(metadata->minContentBoost) * (1.0f - gain)
+ + log2(metadata->maxContentBoost) * gain;
+ float gainFactor = exp2(logBoost);
+ return e * gainFactor;
}
-Color applyRecovery(Color e, float recovery, jr_metadata_ptr metadata, float displayBoost) {
- float logBoost = log2(metadata->minContentBoost) * (1.0f - recovery)
- + log2(metadata->maxContentBoost) * recovery;
- float recoveryFactor = exp2(logBoost * displayBoost / metadata->maxContentBoost);
- return e * recoveryFactor;
+Color applyGain(Color e, float gain, jr_metadata_ptr metadata, float displayBoost) {
+ float logBoost = log2(metadata->minContentBoost) * (1.0f - gain)
+ + log2(metadata->maxContentBoost) * gain;
+ float gainFactor = exp2(logBoost * displayBoost / metadata->maxContentBoost);
+ return e * gainFactor;
}
-Color applyRecoveryLUT(Color e, float recovery, RecoveryLUT& recoveryLUT) {
- float recoveryFactor = recoveryLUT.getRecoveryFactor(recovery);
- return e * recoveryFactor;
+Color applyGainLUT(Color e, float gain, GainLUT& gainLUT) {
+ float gainFactor = gainLUT.getGainFactor(gain);
+ return e * gainFactor;
}
Color getYuv420Pixel(jr_uncompressed_ptr image, size_t x, size_t y) {
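The rename is mechanical, but it is worth restating the math being renamed. With a normalized gain sample $g \in [0, 1]$ read back from the map, `applyGain` above interpolates the boost in log space (the encode side inverts this mapping; the exact clamping in `encodeGain` sits in lines elided from this hunk):

$$
\log_2 F(g) = (1 - g)\,\log_2(\mathrm{minContentBoost}) + g\,\log_2(\mathrm{maxContentBoost}),
\qquad e_{\mathrm{hdr}} = e_{\mathrm{sdr}} \cdot 2^{\log_2 F(g)},
$$

and the display-boost variant scales the exponent by $\mathrm{displayBoost}/\mathrm{maxContentBoost}$ before applying it, i.e. $e_{\mathrm{hdr}} = e_{\mathrm{sdr}} \cdot 2^{\log_2 F(g)\,\cdot\,\mathrm{displayBoost}/\mathrm{maxContentBoost}}$.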
diff --git a/libs/jpegrecoverymap/icc.cpp b/libs/jpegrecoverymap/icc.cpp
index 5412cb1..6e78f67 100644
--- a/libs/jpegrecoverymap/icc.cpp
+++ b/libs/jpegrecoverymap/icc.cpp
@@ -15,7 +15,7 @@
*/
#include <jpegrecoverymap/icc.h>
-#include <jpegrecoverymap/recoverymapmath.h>
+#include <jpegrecoverymap/gainmapmath.h>
#include <vector>
#include <utils/Log.h>
diff --git a/libs/jpegrecoverymap/include/jpegrecoverymap/recoverymapmath.h b/libs/jpegrecoverymap/include/jpegrecoverymap/gainmapmath.h
similarity index 86%
rename from libs/jpegrecoverymap/include/jpegrecoverymap/recoverymapmath.h
rename to libs/jpegrecoverymap/include/jpegrecoverymap/gainmapmath.h
index a32b291..57fddd0 100644
--- a/libs/jpegrecoverymap/include/jpegrecoverymap/recoverymapmath.h
+++ b/libs/jpegrecoverymap/include/jpegrecoverymap/gainmapmath.h
@@ -129,40 +129,40 @@
| (e > 143) * 0x7FFF;
}
-constexpr size_t kRecoveryFactorPrecision = 10;
-constexpr size_t kRecoveryFactorNumEntries = 1 << kRecoveryFactorPrecision;
-struct RecoveryLUT {
- RecoveryLUT(jr_metadata_ptr metadata) {
- for (int idx = 0; idx < kRecoveryFactorNumEntries; idx++) {
- float value = static_cast<float>(idx) / static_cast<float>(kRecoveryFactorNumEntries - 1);
+constexpr size_t kGainFactorPrecision = 10;
+constexpr size_t kGainFactorNumEntries = 1 << kGainFactorPrecision;
+struct GainLUT {
+ GainLUT(jr_metadata_ptr metadata) {
+ for (int idx = 0; idx < kGainFactorNumEntries; idx++) {
+ float value = static_cast<float>(idx) / static_cast<float>(kGainFactorNumEntries - 1);
float logBoost = log2(metadata->minContentBoost) * (1.0f - value)
+ log2(metadata->maxContentBoost) * value;
- mRecoveryTable[idx] = exp2(logBoost);
+ mGainTable[idx] = exp2(logBoost);
}
}
- RecoveryLUT(jr_metadata_ptr metadata, float displayBoost) {
+ GainLUT(jr_metadata_ptr metadata, float displayBoost) {
float boostFactor = displayBoost > 0 ? displayBoost / metadata->maxContentBoost : 1.0f;
- for (int idx = 0; idx < kRecoveryFactorNumEntries; idx++) {
- float value = static_cast<float>(idx) / static_cast<float>(kRecoveryFactorNumEntries - 1);
+ for (int idx = 0; idx < kGainFactorNumEntries; idx++) {
+ float value = static_cast<float>(idx) / static_cast<float>(kGainFactorNumEntries - 1);
float logBoost = log2(metadata->minContentBoost) * (1.0f - value)
+ log2(metadata->maxContentBoost) * value;
- mRecoveryTable[idx] = exp2(logBoost * boostFactor);
+ mGainTable[idx] = exp2(logBoost * boostFactor);
}
}
- ~RecoveryLUT() {
+ ~GainLUT() {
}
- float getRecoveryFactor(float recovery) {
- uint32_t idx = static_cast<uint32_t>(recovery * (kRecoveryFactorNumEntries - 1));
+ float getGainFactor(float gain) {
+ uint32_t idx = static_cast<uint32_t>(gain * (kGainFactorNumEntries - 1));
//TODO() : Remove once conversion modules have appropriate clamping in place
- idx = CLIP3(idx, 0, kRecoveryFactorNumEntries - 1);
- return mRecoveryTable[idx];
+ idx = CLIP3(idx, 0, kGainFactorNumEntries - 1);
+ return mGainTable[idx];
}
private:
- float mRecoveryTable[kRecoveryFactorNumEntries];
+ float mGainTable[kGainFactorNumEntries];
};
struct ShepardsIDW {
@@ -195,11 +195,11 @@
// p60 p61 p62 p63 p64 p65 p66 p67
// p70 p71 p72 p73 p74 p75 p76 p77
- // Recovery Map (for 4 scale factor) :-
+ // Gain Map (for 4 scale factor) :-
// m00 p01
// m10 m11
- // Recovery sample of curr 4x4, right 4x4, bottom 4x4, bottom right 4x4 are used during
+ // Gain sample of curr 4x4, right 4x4, bottom 4x4, bottom right 4x4 are used during
// reconstruction. hence table weight size is 4.
float* mWeights;
// TODO: check if its ok to mWeights at places
@@ -354,29 +354,29 @@
inline Color identityConversion(Color e) { return e; }
/*
- * Get the conversion to apply to the HDR image for recovery map generation
+ * Get the conversion to apply to the HDR image for gain map generation
*/
ColorTransformFn getHdrConversionFn(jpegr_color_gamut sdr_gamut, jpegr_color_gamut hdr_gamut);
////////////////////////////////////////////////////////////////////////////////
-// Recovery map calculations
+// Gain map calculations
/*
- * Calculate the 8-bit unsigned integer recovery value for the given SDR and HDR
+ * Calculate the 8-bit unsigned integer gain value for the given SDR and HDR
* luminances in linear space, and the hdr ratio to encode against.
*/
-uint8_t encodeRecovery(float y_sdr, float y_hdr, jr_metadata_ptr metadata);
-uint8_t encodeRecovery(float y_sdr, float y_hdr, jr_metadata_ptr metadata,
- float log2MinContentBoost, float log2MaxContentBoost);
+uint8_t encodeGain(float y_sdr, float y_hdr, jr_metadata_ptr metadata);
+uint8_t encodeGain(float y_sdr, float y_hdr, jr_metadata_ptr metadata,
+ float log2MinContentBoost, float log2MaxContentBoost);
/*
- * Calculates the linear luminance in nits after applying the given recovery
+ * Calculates the linear luminance in nits after applying the given gain
* value, with the given hdr ratio, to the given sdr input in the range [0, 1].
*/
-Color applyRecovery(Color e, float recovery, jr_metadata_ptr metadata);
-Color applyRecovery(Color e, float recovery, jr_metadata_ptr metadata, float displayBoost);
-Color applyRecoveryLUT(Color e, float recovery, RecoveryLUT& recoveryLUT);
+Color applyGain(Color e, float gain, jr_metadata_ptr metadata);
+Color applyGain(Color e, float gain, jr_metadata_ptr metadata, float displayBoost);
+Color applyGainLUT(Color e, float gain, GainLUT& gainLUT);
/*
* Helper for sampling from YUV 420 images.
@@ -405,7 +405,7 @@
Color sampleP010(jr_uncompressed_ptr map, size_t map_scale_factor, size_t x, size_t y);
/*
- * Sample the recovery value for the map from a given x,y coordinate on a scale
+ * Sample the gain value for the map from a given x,y coordinate on a scale
* that is map scale factor larger than the map size.
*/
float sampleMap(jr_uncompressed_ptr map, float map_scale_factor, size_t x, size_t y);
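`GainLUT` is just a precomputed table of the same boost curve, quantized to kGainFactorNumEntries steps of the gain sample. A minimal sketch, not part of the patch; the standalone construction of `jpegr_metadata_struct` (whose `minContentBoost`/`maxContentBoost` fields are taken from this patch) is an assumption for illustration.

```cpp
#include <jpegrecoverymap/gainmapmath.h>
#include <jpegrecoverymap/jpegr.h>

using namespace android::jpegrecoverymap;

float lutGainFactorExample() {
    jpegr_metadata_struct metadata{};   // illustrative metadata, not from a real image
    metadata.minContentBoost = 1.0f;    // no boost at the SDR end
    metadata.maxContentBoost = 8.0f;    // up to 3 stops of boost at the HDR end

    GainLUT lut(&metadata);
    // For g = 0.5 this is exp2(0.5 * log2(8)) = sqrt(8) ≈ 2.83, up to the quantization of g
    // into (kGainFactorNumEntries - 1) steps.
    return lut.getGainFactor(0.5f);
}
```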
diff --git a/libs/jpegrecoverymap/include/jpegrecoverymap/jpegr.h b/libs/jpegrecoverymap/include/jpegrecoverymap/jpegr.h
index afec065..ce7b33b 100644
--- a/libs/jpegrecoverymap/include/jpegrecoverymap/jpegr.h
+++ b/libs/jpegrecoverymap/include/jpegrecoverymap/jpegr.h
@@ -58,14 +58,14 @@
};
/*
- * Holds information for uncompressed image or recovery map.
+ * Holds information for uncompressed image or gain map.
*/
struct jpegr_uncompressed_struct {
// Pointer to the data location.
void* data;
- // Width of the recovery map or the luma plane of the image in pixels.
+ // Width of the gain map or the luma plane of the image in pixels.
int width;
- // Height of the recovery map or the luma plane of the image in pixels.
+ // Height of the gain map or the luma plane of the image in pixels.
int height;
// Color gamut.
jpegr_color_gamut colorGamut;
@@ -86,7 +86,7 @@
};
/*
- * Holds information for compressed image or recovery map.
+ * Holds information for compressed image or gain map.
*/
struct jpegr_compressed_struct {
// Pointer to the data location.
@@ -110,7 +110,7 @@
};
/*
- * Holds information for recovery map related metadata.
+ * Holds information for gain map related metadata.
*/
struct jpegr_metadata_struct {
// JPEG/R version
@@ -135,8 +135,8 @@
* Encode API-0
* Compress JPEGR image from 10-bit HDR YUV.
*
- * Tonemap the HDR input to a SDR image, generate recovery map from the HDR and SDR images,
- * compress SDR YUV to 8-bit JPEG and append the recovery map to the end of the compressed
+ * Tonemap the HDR input to a SDR image, generate gain map from the HDR and SDR images,
+ * compress SDR YUV to 8-bit JPEG and append the gain map to the end of the compressed
* JPEG.
* @param uncompressed_p010_image uncompressed HDR image in P010 color format
* @param hdr_tf transfer function of the HDR image
@@ -156,8 +156,8 @@
* Encode API-1
* Compress JPEGR image from 10-bit HDR YUV and 8-bit SDR YUV.
*
- * Generate recovery map from the HDR and SDR inputs, compress SDR YUV to 8-bit JPEG and append
- * the recovery map to the end of the compressed JPEG. HDR and SDR inputs must be the same
+ * Generate gain map from the HDR and SDR inputs, compress SDR YUV to 8-bit JPEG and append
+ * the gain map to the end of the compressed JPEG. HDR and SDR inputs must be the same
* resolution.
* @param uncompressed_p010_image uncompressed HDR image in P010 color format
* @param uncompressed_yuv_420_image uncompressed SDR image in YUV_420 color format
@@ -181,7 +181,7 @@
*
* This method requires HAL Hardware JPEG encoder.
*
- * Generate recovery map from the HDR and SDR inputs, append the recovery map to the end of the
+ * Generate gain map from the HDR and SDR inputs, append the gain map to the end of the
* compressed JPEG. HDR and SDR inputs must be the same resolution and color space.
* @param uncompressed_p010_image uncompressed HDR image in P010 color format
* @param uncompressed_yuv_420_image uncompressed SDR image in YUV_420 color format
@@ -204,8 +204,8 @@
*
* This method requires HAL Hardware JPEG encoder.
*
- * Decode the compressed 8-bit JPEG image to YUV SDR, generate recovery map from the HDR input
- * and the decoded SDR result, append the recovery map to the end of the compressed JPEG. HDR
+ * Decode the compressed 8-bit JPEG image to YUV SDR, generate gain map from the HDR input
+ * and the decoded SDR result, append the gain map to the end of the compressed JPEG. HDR
* and SDR inputs must be the same resolution.
* @param uncompressed_p010_image uncompressed HDR image in P010 color format
* @param compressed_jpeg_image compressed 8-bit JPEG image
@@ -242,9 +242,9 @@
----------------------------------------------------------------------
| JPEGR_OUTPUT_HDR_HLG | RGBA_1010102 HLG |
----------------------------------------------------------------------
- * @param recovery_map destination of the decoded recovery map. The default value is NULL where
+ * @param gain_map destination of the decoded gain map. The default value is NULL where
the decoder will do nothing about it. If configured not NULL the decoder
- will write the decoded recovery_map data into this structure. The format
+ will write the decoded gain_map data into this structure. The format
is defined in {@code jpegr_uncompressed_struct}.
* @param metadata destination of the decoded metadata. The default value is NULL where the
decoder will do nothing about it. If configured not NULL the decoder will
@@ -257,7 +257,7 @@
float max_display_boost = FLT_MAX,
jr_exif_ptr exif = nullptr,
jpegr_output_format output_format = JPEGR_OUTPUT_HDR_LINEAR,
- jr_uncompressed_ptr recovery_map = nullptr,
+ jr_uncompressed_ptr gain_map = nullptr,
jr_metadata_ptr metadata = nullptr);
/*
@@ -274,30 +274,30 @@
protected:
/*
* This method is called in the encoding pipeline. It will take the uncompressed 8-bit and
- * 10-bit yuv images as input, and calculate the uncompressed recovery map. The input images
+ * 10-bit yuv images as input, and calculate the uncompressed gain map. The input images
* must be the same resolution.
*
* @param uncompressed_yuv_420_image uncompressed SDR image in YUV_420 color format
* @param uncompressed_p010_image uncompressed HDR image in P010 color format
* @param hdr_tf transfer function of the HDR image
- * @param dest recovery map; caller responsible for memory of data
+ * @param dest gain map; caller responsible for memory of data
* @param metadata max_content_boost is filled in
* @return NO_ERROR if calculation succeeds, error code if error occurs.
*/
- status_t generateRecoveryMap(jr_uncompressed_ptr uncompressed_yuv_420_image,
- jr_uncompressed_ptr uncompressed_p010_image,
- jpegr_transfer_function hdr_tf,
- jr_metadata_ptr metadata,
- jr_uncompressed_ptr dest);
+ status_t generateGainMap(jr_uncompressed_ptr uncompressed_yuv_420_image,
+ jr_uncompressed_ptr uncompressed_p010_image,
+ jpegr_transfer_function hdr_tf,
+ jr_metadata_ptr metadata,
+ jr_uncompressed_ptr dest);
/*
* This method is called in the decoding pipeline. It will take the uncompressed (decoded)
- * 8-bit yuv image, the uncompressed (decoded) recovery map, and extracted JPEG/R metadata as
+ * 8-bit yuv image, the uncompressed (decoded) gain map, and extracted JPEG/R metadata as
* input, and calculate the 10-bit recovered image. The recovered output image is the same
* color gamut as the SDR image, with HLG transfer function, and is in RGBA1010102 data format.
*
* @param uncompressed_yuv_420_image uncompressed SDR image in YUV_420 color format
- * @param uncompressed_recovery_map uncompressed recovery map
+ * @param uncompressed_gain_map uncompressed gain map
* @param metadata JPEG/R metadata extracted from XMP.
* @param output_format flag for setting output color format. if set to
* {@code JPEGR_OUTPUT_SDR}, decoder will only decode the primary image
@@ -306,67 +306,67 @@
* @param dest reconstructed HDR image
* @return NO_ERROR if calculation succeeds, error code if error occurs.
*/
- status_t applyRecoveryMap(jr_uncompressed_ptr uncompressed_yuv_420_image,
- jr_uncompressed_ptr uncompressed_recovery_map,
- jr_metadata_ptr metadata,
- jpegr_output_format output_format,
- float max_display_boost,
- jr_uncompressed_ptr dest);
+ status_t applyGainMap(jr_uncompressed_ptr uncompressed_yuv_420_image,
+ jr_uncompressed_ptr uncompressed_gain_map,
+ jr_metadata_ptr metadata,
+ jpegr_output_format output_format,
+ float max_display_boost,
+ jr_uncompressed_ptr dest);
private:
/*
- * This method is called in the encoding pipeline. It will encode the recovery map.
+ * This method is called in the encoding pipeline. It will encode the gain map.
*
- * @param uncompressed_recovery_map uncompressed recovery map
+ * @param uncompressed_gain_map uncompressed gain map
* @param dest encoded recover map
* @return NO_ERROR if encoding succeeds, error code if error occurs.
*/
- status_t compressRecoveryMap(jr_uncompressed_ptr uncompressed_recovery_map,
- jr_compressed_ptr dest);
+ status_t compressGainMap(jr_uncompressed_ptr uncompressed_gain_map,
+ jr_compressed_ptr dest);
/*
- * This methoud is called to separate primary image and recovery map image from JPEGR
+ * This method is called to separate primary image and gain map image from JPEGR
*
* @param compressed_jpegr_image compressed JPEGR image
* @param primary_image destination of primary image
- * @param recovery_map destination of compressed recovery map
+ * @param gain_map destination of compressed gain map
* @return NO_ERROR if calculation succeeds, error code if error occurs.
*/
- status_t extractPrimaryImageAndRecoveryMap(jr_compressed_ptr compressed_jpegr_image,
- jr_compressed_ptr primary_image,
- jr_compressed_ptr recovery_map);
+ status_t extractPrimaryImageAndGainMap(jr_compressed_ptr compressed_jpegr_image,
+ jr_compressed_ptr primary_image,
+ jr_compressed_ptr gain_map);
/*
* This method is called in the decoding pipeline. It will read XMP metadata to find the start
- * position of the compressed recovery map, and will extract the compressed recovery map.
+ * position of the compressed gain map, and will extract the compressed gain map.
*
* @param compressed_jpegr_image compressed JPEGR image
- * @param dest destination of compressed recovery map
+ * @param dest destination of compressed gain map
* @return NO_ERROR if calculation succeeds, error code if error occurs.
*/
- status_t extractRecoveryMap(jr_compressed_ptr compressed_jpegr_image,
- jr_compressed_ptr dest);
+ status_t extractGainMap(jr_compressed_ptr compressed_jpegr_image,
+ jr_compressed_ptr dest);
/*
* This method is called in the encoding pipeline. It will take the standard 8-bit JPEG image,
- * the compressed recovery map and optionally the exif package as inputs, and generate the XMP
+ * the compressed gain map and optionally the exif package as inputs, and generate the XMP
* metadata, and finally append everything in the order of:
- * SOI, APP2(EXIF) (if EXIF is from outside), APP2(XMP), primary image, recovery map
+ * SOI, APP2(EXIF) (if EXIF is from outside), APP2(XMP), primary image, gain map
* Note that EXIF package is only available for encoding API-0 and API-1. For encoding API-2 and
* API-3 this parameter is null, but the primary image in JPEG/R may still have EXIF as long as
* the input JPEG has EXIF.
*
* @param compressed_jpeg_image compressed 8-bit JPEG image
- * @param compress_recovery_map compressed recover map
+ * @param compress_gain_map compressed gain map
* @param (nullable) exif EXIF package
* @param metadata JPEG/R metadata to encode in XMP of the jpeg
* @param dest compressed JPEGR image
* @return NO_ERROR if calculation succeeds, error code if error occurs.
*/
- status_t appendRecoveryMap(jr_compressed_ptr compressed_jpeg_image,
- jr_compressed_ptr compressed_recovery_map,
- jr_exif_ptr exif,
- jr_metadata_ptr metadata,
- jr_compressed_ptr dest);
+ status_t appendGainMap(jr_compressed_ptr compressed_jpeg_image,
+ jr_compressed_ptr compressed_gain_map,
+ jr_exif_ptr exif,
+ jr_metadata_ptr metadata,
+ jr_compressed_ptr dest);
/*
* This method will tone map a HDR image to an SDR image.
diff --git a/libs/jpegrecoverymap/jpegr.cpp b/libs/jpegrecoverymap/jpegr.cpp
index cdf685e..2590f63 100644
--- a/libs/jpegrecoverymap/jpegr.cpp
+++ b/libs/jpegrecoverymap/jpegr.cpp
@@ -17,7 +17,7 @@
#include <jpegrecoverymap/jpegr.h>
#include <jpegrecoverymap/jpegencoderhelper.h>
#include <jpegrecoverymap/jpegdecoderhelper.h>
-#include <jpegrecoverymap/recoverymapmath.h>
+#include <jpegrecoverymap/gainmapmath.h>
#include <jpegrecoverymap/jpegrutils.h>
#include <jpegrecoverymap/multipictureformat.h>
#include <jpegrecoverymap/icc.h>
@@ -50,7 +50,7 @@
#define USE_PQ_OETF_LUT 1
#define USE_HLG_INVOETF_LUT 1
#define USE_PQ_INVOETF_LUT 1
-#define USE_APPLY_RECOVERY_LUT 1
+#define USE_APPLY_GAIN_LUT 1
#define JPEGR_CHECK(x) \
{ \
@@ -69,7 +69,7 @@
// JPEG encoding / decoding will require 8 x 8 DCT transform.
// Width must be 8 dividable, and height must be 2 dividable.
static const size_t kJpegBlock = 8;
-// JPEG compress quality (0 ~ 100) for recovery map
+// JPEG compress quality (0 ~ 100) for gain map
static const int kMapCompressQuality = 85;
#define CONFIG_MULTITHREAD 1
@@ -163,7 +163,7 @@
JPEGR_CHECK(toneMap(uncompressed_p010_image, &uncompressed_yuv_420_image));
jpegr_uncompressed_struct map;
- JPEGR_CHECK(generateRecoveryMap(
+ JPEGR_CHECK(generateGainMap(
&uncompressed_yuv_420_image, uncompressed_p010_image, hdr_tf, &metadata, &map));
std::unique_ptr<uint8_t[]> map_data;
map_data.reset(reinterpret_cast<uint8_t*>(map.data));
@@ -172,7 +172,7 @@
compressed_map.maxLength = map.width * map.height;
unique_ptr<uint8_t[]> compressed_map_data = make_unique<uint8_t[]>(compressed_map.maxLength);
compressed_map.data = compressed_map_data.get();
- JPEGR_CHECK(compressRecoveryMap(&map, &compressed_map));
+ JPEGR_CHECK(compressGainMap(&map, &compressed_map));
sp<DataStruct> icc = IccHelper::writeIccProfile(JPEGR_TF_SRGB,
uncompressed_yuv_420_image.colorGamut);
@@ -188,7 +188,7 @@
jpeg.data = jpeg_encoder.getCompressedImagePtr();
jpeg.length = jpeg_encoder.getCompressedImageSize();
- JPEGR_CHECK(appendRecoveryMap(&jpeg, &compressed_map, exif, &metadata, dest));
+ JPEGR_CHECK(appendGainMap(&jpeg, &compressed_map, exif, &metadata, dest));
return NO_ERROR;
}
@@ -219,7 +219,7 @@
metadata.version = kJpegrVersion;
jpegr_uncompressed_struct map;
- JPEGR_CHECK(generateRecoveryMap(
+ JPEGR_CHECK(generateGainMap(
uncompressed_yuv_420_image, uncompressed_p010_image, hdr_tf, &metadata, &map));
std::unique_ptr<uint8_t[]> map_data;
map_data.reset(reinterpret_cast<uint8_t*>(map.data));
@@ -228,7 +228,7 @@
compressed_map.maxLength = map.width * map.height;
unique_ptr<uint8_t[]> compressed_map_data = make_unique<uint8_t[]>(compressed_map.maxLength);
compressed_map.data = compressed_map_data.get();
- JPEGR_CHECK(compressRecoveryMap(&map, &compressed_map));
+ JPEGR_CHECK(compressGainMap(&map, &compressed_map));
sp<DataStruct> icc = IccHelper::writeIccProfile(JPEGR_TF_SRGB,
uncompressed_yuv_420_image->colorGamut);
@@ -244,7 +244,7 @@
jpeg.data = jpeg_encoder.getCompressedImagePtr();
jpeg.length = jpeg_encoder.getCompressedImageSize();
- JPEGR_CHECK(appendRecoveryMap(&jpeg, &compressed_map, exif, &metadata, dest));
+ JPEGR_CHECK(appendGainMap(&jpeg, &compressed_map, exif, &metadata, dest));
return NO_ERROR;
}
@@ -271,7 +271,7 @@
metadata.version = kJpegrVersion;
jpegr_uncompressed_struct map;
- JPEGR_CHECK(generateRecoveryMap(
+ JPEGR_CHECK(generateGainMap(
uncompressed_yuv_420_image, uncompressed_p010_image, hdr_tf, &metadata, &map));
std::unique_ptr<uint8_t[]> map_data;
map_data.reset(reinterpret_cast<uint8_t*>(map.data));
@@ -280,9 +280,9 @@
compressed_map.maxLength = map.width * map.height;
unique_ptr<uint8_t[]> compressed_map_data = make_unique<uint8_t[]>(compressed_map.maxLength);
compressed_map.data = compressed_map_data.get();
- JPEGR_CHECK(compressRecoveryMap(&map, &compressed_map));
+ JPEGR_CHECK(compressGainMap(&map, &compressed_map));
- JPEGR_CHECK(appendRecoveryMap(compressed_jpeg_image, &compressed_map, nullptr, &metadata, dest));
+ JPEGR_CHECK(appendGainMap(compressed_jpeg_image, &compressed_map, nullptr, &metadata, dest));
return NO_ERROR;
}
@@ -322,7 +322,7 @@
metadata.version = kJpegrVersion;
jpegr_uncompressed_struct map;
- JPEGR_CHECK(generateRecoveryMap(
+ JPEGR_CHECK(generateGainMap(
&uncompressed_yuv_420_image, uncompressed_p010_image, hdr_tf, &metadata, &map));
std::unique_ptr<uint8_t[]> map_data;
map_data.reset(reinterpret_cast<uint8_t*>(map.data));
@@ -331,9 +331,9 @@
compressed_map.maxLength = map.width * map.height;
unique_ptr<uint8_t[]> compressed_map_data = make_unique<uint8_t[]>(compressed_map.maxLength);
compressed_map.data = compressed_map_data.get();
- JPEGR_CHECK(compressRecoveryMap(&map, &compressed_map));
+ JPEGR_CHECK(compressGainMap(&map, &compressed_map));
- JPEGR_CHECK(appendRecoveryMap(compressed_jpeg_image, &compressed_map, nullptr, &metadata, dest));
+ JPEGR_CHECK(appendGainMap(compressed_jpeg_image, &compressed_map, nullptr, &metadata, dest));
return NO_ERROR;
}
@@ -343,9 +343,9 @@
return ERROR_JPEGR_INVALID_NULL_PTR;
}
- jpegr_compressed_struct primary_image, recovery_map;
- JPEGR_CHECK(extractPrimaryImageAndRecoveryMap(compressed_jpegr_image,
- &primary_image, &recovery_map));
+ jpegr_compressed_struct primary_image, gain_map;
+ JPEGR_CHECK(extractPrimaryImageAndGainMap(compressed_jpegr_image,
+ &primary_image, &gain_map));
JpegDecoderHelper jpeg_decoder;
if (!jpeg_decoder.getCompressedImageParameters(primary_image.data, primary_image.length,
@@ -363,7 +363,7 @@
float max_display_boost,
jr_exif_ptr exif,
jpegr_output_format output_format,
- jr_uncompressed_ptr recovery_map,
+ jr_uncompressed_ptr gain_map,
jr_metadata_ptr metadata) {
if (compressed_jpegr_image == nullptr || dest == nullptr) {
return ERROR_JPEGR_INVALID_NULL_PTR;
@@ -388,7 +388,7 @@
dest->width = uncompressed_rgba_image.width;
dest->height = uncompressed_rgba_image.height;
- if (recovery_map == nullptr && exif == nullptr) {
+ if (gain_map == nullptr && exif == nullptr) {
return NO_ERROR;
}
@@ -402,30 +402,30 @@
memcpy(exif->data, jpeg_decoder.getEXIFPtr(), jpeg_decoder.getEXIFSize());
exif->length = jpeg_decoder.getEXIFSize();
}
- if (recovery_map == nullptr) {
+ if (gain_map == nullptr) {
return NO_ERROR;
}
}
jpegr_compressed_struct compressed_map;
- JPEGR_CHECK(extractRecoveryMap(compressed_jpegr_image, &compressed_map));
+ JPEGR_CHECK(extractGainMap(compressed_jpegr_image, &compressed_map));
- JpegDecoderHelper recovery_map_decoder;
- if (!recovery_map_decoder.decompressImage(compressed_map.data, compressed_map.length)) {
+ JpegDecoderHelper gain_map_decoder;
+ if (!gain_map_decoder.decompressImage(compressed_map.data, compressed_map.length)) {
return ERROR_JPEGR_DECODE_ERROR;
}
- if (recovery_map != nullptr) {
- recovery_map->width = recovery_map_decoder.getDecompressedImageWidth();
- recovery_map->height = recovery_map_decoder.getDecompressedImageHeight();
- int size = recovery_map->width * recovery_map->height;
- recovery_map->data = malloc(size);
- memcpy(recovery_map->data, recovery_map_decoder.getDecompressedImagePtr(), size);
+ if (gain_map != nullptr) {
+ gain_map->width = gain_map_decoder.getDecompressedImageWidth();
+ gain_map->height = gain_map_decoder.getDecompressedImageHeight();
+ int size = gain_map->width * gain_map->height;
+ gain_map->data = malloc(size);
+ memcpy(gain_map->data, gain_map_decoder.getDecompressedImagePtr(), size);
}
jpegr_metadata_struct jr_metadata;
- if (!getMetadataFromXMP(static_cast<uint8_t*>(recovery_map_decoder.getXMPPtr()),
- recovery_map_decoder.getXMPSize(), &jr_metadata)) {
+ if (!getMetadataFromXMP(static_cast<uint8_t*>(gain_map_decoder.getXMPPtr()),
+ gain_map_decoder.getXMPSize(), &jr_metadata)) {
return ERROR_JPEGR_DECODE_ERROR;
}
@@ -456,30 +456,30 @@
}
jpegr_uncompressed_struct map;
- map.data = recovery_map_decoder.getDecompressedImagePtr();
- map.width = recovery_map_decoder.getDecompressedImageWidth();
- map.height = recovery_map_decoder.getDecompressedImageHeight();
+ map.data = gain_map_decoder.getDecompressedImagePtr();
+ map.width = gain_map_decoder.getDecompressedImageWidth();
+ map.height = gain_map_decoder.getDecompressedImageHeight();
jpegr_uncompressed_struct uncompressed_yuv_420_image;
uncompressed_yuv_420_image.data = jpeg_decoder.getDecompressedImagePtr();
uncompressed_yuv_420_image.width = jpeg_decoder.getDecompressedImageWidth();
uncompressed_yuv_420_image.height = jpeg_decoder.getDecompressedImageHeight();
- JPEGR_CHECK(applyRecoveryMap(&uncompressed_yuv_420_image, &map, &jr_metadata, output_format,
- max_display_boost, dest));
+ JPEGR_CHECK(applyGainMap(&uncompressed_yuv_420_image, &map, &jr_metadata, output_format,
+ max_display_boost, dest));
return NO_ERROR;
}
-status_t JpegR::compressRecoveryMap(jr_uncompressed_ptr uncompressed_recovery_map,
- jr_compressed_ptr dest) {
- if (uncompressed_recovery_map == nullptr || dest == nullptr) {
+status_t JpegR::compressGainMap(jr_uncompressed_ptr uncompressed_gain_map,
+ jr_compressed_ptr dest) {
+ if (uncompressed_gain_map == nullptr || dest == nullptr) {
return ERROR_JPEGR_INVALID_NULL_PTR;
}
JpegEncoderHelper jpeg_encoder;
- if (!jpeg_encoder.compressImage(uncompressed_recovery_map->data,
- uncompressed_recovery_map->width,
- uncompressed_recovery_map->height,
+ if (!jpeg_encoder.compressImage(uncompressed_gain_map->data,
+ uncompressed_gain_map->width,
+ uncompressed_gain_map->height,
kMapCompressQuality,
nullptr,
0,
@@ -554,11 +554,11 @@
mQueuedAllJobs = false;
}
-status_t JpegR::generateRecoveryMap(jr_uncompressed_ptr uncompressed_yuv_420_image,
- jr_uncompressed_ptr uncompressed_p010_image,
- jpegr_transfer_function hdr_tf,
- jr_metadata_ptr metadata,
- jr_uncompressed_ptr dest) {
+status_t JpegR::generateGainMap(jr_uncompressed_ptr uncompressed_yuv_420_image,
+ jr_uncompressed_ptr uncompressed_p010_image,
+ jpegr_transfer_function hdr_tf,
+ jr_metadata_ptr metadata,
+ jr_uncompressed_ptr dest) {
if (uncompressed_yuv_420_image == nullptr
|| uncompressed_p010_image == nullptr
|| metadata == nullptr
@@ -675,7 +675,7 @@
size_t pixel_idx = x + y * dest_map_stride;
reinterpret_cast<uint8_t*>(dest->data)[pixel_idx] =
- encodeRecovery(sdr_y_nits, hdr_y_nits, metadata, log2MinBoost, log2MaxBoost);
+ encodeGain(sdr_y_nits, hdr_y_nits, metadata, log2MinBoost, log2MaxBoost);
}
}
}
@@ -701,14 +701,14 @@
return NO_ERROR;
}
-status_t JpegR::applyRecoveryMap(jr_uncompressed_ptr uncompressed_yuv_420_image,
- jr_uncompressed_ptr uncompressed_recovery_map,
- jr_metadata_ptr metadata,
- jpegr_output_format output_format,
- float max_display_boost,
- jr_uncompressed_ptr dest) {
+status_t JpegR::applyGainMap(jr_uncompressed_ptr uncompressed_yuv_420_image,
+ jr_uncompressed_ptr uncompressed_gain_map,
+ jr_metadata_ptr metadata,
+ jpegr_output_format output_format,
+ float max_display_boost,
+ jr_uncompressed_ptr dest) {
if (uncompressed_yuv_420_image == nullptr
- || uncompressed_recovery_map == nullptr
+ || uncompressed_gain_map == nullptr
|| metadata == nullptr
|| dest == nullptr) {
return ERROR_JPEGR_INVALID_NULL_PTR;
@@ -718,12 +718,12 @@
dest->height = uncompressed_yuv_420_image->height;
ShepardsIDW idwTable(kMapDimensionScaleFactor);
float display_boost = std::min(max_display_boost, metadata->maxContentBoost);
- RecoveryLUT recoveryLUT(metadata, display_boost);
+ GainLUT gainLUT(metadata, display_boost);
JobQueue jobQueue;
- std::function<void()> applyRecMap = [uncompressed_yuv_420_image, uncompressed_recovery_map,
+ std::function<void()> applyRecMap = [uncompressed_yuv_420_image, uncompressed_gain_map,
metadata, dest, &jobQueue, &idwTable, output_format,
- &recoveryLUT, display_boost]() -> void {
+ &gainLUT, display_boost]() -> void {
size_t width = uncompressed_yuv_420_image->width;
size_t height = uncompressed_yuv_420_image->height;
@@ -738,22 +738,22 @@
#else
Color rgb_sdr = srgbInvOetf(rgb_gamma_sdr);
#endif
- float recovery;
+ float gain;
// TODO: determine map scaling factor based on actual map dims
size_t map_scale_factor = kMapDimensionScaleFactor;
// TODO: If map_scale_factor is guaranteed to be an integer, then remove the following.
// Currently map_scale_factor is of type size_t, but it could be changed to a float
// later.
if (map_scale_factor != floorf(map_scale_factor)) {
- recovery = sampleMap(uncompressed_recovery_map, map_scale_factor, x, y);
+ gain = sampleMap(uncompressed_gain_map, map_scale_factor, x, y);
} else {
- recovery = sampleMap(uncompressed_recovery_map, map_scale_factor, x, y, idwTable);
+ gain = sampleMap(uncompressed_gain_map, map_scale_factor, x, y, idwTable);
}
-#if USE_APPLY_RECOVERY_LUT
- Color rgb_hdr = applyRecoveryLUT(rgb_sdr, recovery, recoveryLUT);
+#if USE_APPLY_GAIN_LUT
+ Color rgb_hdr = applyGainLUT(rgb_sdr, gain, gainLUT);
#else
- Color rgb_hdr = applyRecovery(rgb_sdr, recovery, metadata, display_boost);
+ Color rgb_hdr = applyGain(rgb_sdr, gain, metadata, display_boost);
#endif
rgb_hdr = rgb_hdr / display_boost;
size_t pixel_idx = x + y * width;
@@ -815,9 +815,9 @@
return NO_ERROR;
}
-status_t JpegR::extractPrimaryImageAndRecoveryMap(jr_compressed_ptr compressed_jpegr_image,
- jr_compressed_ptr primary_image,
- jr_compressed_ptr recovery_map) {
+status_t JpegR::extractPrimaryImageAndGainMap(jr_compressed_ptr compressed_jpegr_image,
+ jr_compressed_ptr primary_image,
+ jr_compressed_ptr gain_map) {
if (compressed_jpegr_image == nullptr) {
return ERROR_JPEGR_INVALID_NULL_PTR;
}
@@ -855,23 +855,23 @@
primary_image->length = image_ranges[0].GetLength();
}
- if (recovery_map != nullptr) {
- recovery_map->data = static_cast<uint8_t*>(compressed_jpegr_image->data) +
+ if (gain_map != nullptr) {
+ gain_map->data = static_cast<uint8_t*>(compressed_jpegr_image->data) +
image_ranges[1].GetBegin();
- recovery_map->length = image_ranges[1].GetLength();
+ gain_map->length = image_ranges[1].GetLength();
}
return NO_ERROR;
}
-status_t JpegR::extractRecoveryMap(jr_compressed_ptr compressed_jpegr_image,
- jr_compressed_ptr dest) {
+status_t JpegR::extractGainMap(jr_compressed_ptr compressed_jpegr_image,
+ jr_compressed_ptr dest) {
if (compressed_jpegr_image == nullptr || dest == nullptr) {
return ERROR_JPEGR_INVALID_NULL_PTR;
}
- return extractPrimaryImageAndRecoveryMap(compressed_jpegr_image, nullptr, dest);
+ return extractPrimaryImageAndGainMap(compressed_jpegr_image, nullptr, dest);
}
// JPEG/R structure:
@@ -900,20 +900,20 @@
// name space ("http://ns.adobe.com/xap/1.0/\0")
// XMP
//
-// (Required) secondary image (the recovery map, without the first two bytes (SOI))
+// (Required) secondary image (the gain map, without the first two bytes (SOI))
//
// Metadata versions we are using:
// ECMA TR-98 for JFIF marker
// Exif 2.2 spec for EXIF marker
// Adobe XMP spec part 3 for XMP marker
// ICC v4.3 spec for ICC
-status_t JpegR::appendRecoveryMap(jr_compressed_ptr compressed_jpeg_image,
- jr_compressed_ptr compressed_recovery_map,
- jr_exif_ptr exif,
- jr_metadata_ptr metadata,
- jr_compressed_ptr dest) {
+status_t JpegR::appendGainMap(jr_compressed_ptr compressed_jpeg_image,
+ jr_compressed_ptr compressed_gain_map,
+ jr_exif_ptr exif,
+ jr_metadata_ptr metadata,
+ jr_compressed_ptr dest) {
if (compressed_jpeg_image == nullptr
- || compressed_recovery_map == nullptr
+ || compressed_gain_map == nullptr
|| metadata == nullptr
|| dest == nullptr) {
return ERROR_JPEGR_INVALID_NULL_PTR;
@@ -930,7 +930,7 @@
+ xmp_secondary.size(); /* length of xmp packet */
const int secondary_image_size = 2 /* 2 bytes length of APP1 sign */
+ xmp_secondary_length
- + compressed_recovery_map->length;
+ + compressed_gain_map->length;
// primary image
const string xmp_primary = generateXmpForPrimaryImage(secondary_image_size);
// same as primary
@@ -994,7 +994,7 @@
(uint8_t*)compressed_jpeg_image->data + 2, compressed_jpeg_image->length - 2, pos));
// Finish primary image
- // Begin secondary image (recovery map)
+ // Begin secondary image (gain map)
// Write SOI
JPEGR_CHECK(Write(dest, &photos_editing_formats::image_io::JpegMarker::kStart, 1, pos));
JPEGR_CHECK(Write(dest, &photos_editing_formats::image_io::JpegMarker::kSOI, 1, pos));
@@ -1014,7 +1014,7 @@
// Write secondary image
JPEGR_CHECK(Write(dest,
- (uint8_t*)compressed_recovery_map->data + 2, compressed_recovery_map->length - 2, pos));
+ (uint8_t*)compressed_gain_map->data + 2, compressed_gain_map->length - 2, pos));
// Set back length
dest->length = pos;
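The layout documented above means a decoder never has to copy the two images out of the container: once the byte ranges of the primary image and the gain map are known, extractPrimaryImageAndGainMap can hand back views into the original buffer, exactly as the pointer arithmetic in its hunk shows. Below is a minimal standalone sketch of that slicing; the names (ImageView, ImageRange, sliceContainer) are illustrative, not the library's API, and the metadata parsing that produces the ranges is assumed to have happened elsewhere.

#include <cstddef>
#include <cstdint>

// Illustrative stand-ins for the library's jr_compressed types.
struct ImageView {
    const uint8_t* data;  // points into the container; no copy is made
    size_t length;
};

struct ImageRange {
    size_t begin;   // offset of the image within the container
    size_t length;  // number of bytes in the image
};

// Slice a JPEG/R container into primary-image and gain-map views, given the
// byte ranges recovered from the container metadata.
inline void sliceContainer(const uint8_t* container, const ImageRange& primaryRange,
                           const ImageRange& gainMapRange, ImageView* primary,
                           ImageView* gainMap) {
    if (primary != nullptr) {
        primary->data = container + primaryRange.begin;
        primary->length = primaryRange.length;
    }
    if (gainMap != nullptr) {
        gainMap->data = container + gainMapRange.begin;
        gainMap->length = gainMapRange.length;
    }
}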
diff --git a/libs/jpegrecoverymap/tests/Android.bp b/libs/jpegrecoverymap/tests/Android.bp
index d5da7fb..59b1237 100644
--- a/libs/jpegrecoverymap/tests/Android.bp
+++ b/libs/jpegrecoverymap/tests/Android.bp
@@ -26,7 +26,7 @@
test_suites: ["device-tests"],
srcs: [
"jpegr_test.cpp",
- "recoverymapmath_test.cpp",
+ "gainmapmath_test.cpp",
],
shared_libs: [
"libimage_io",
diff --git a/libs/jpegrecoverymap/tests/recoverymapmath_test.cpp b/libs/jpegrecoverymap/tests/gainmapmath_test.cpp
similarity index 72%
rename from libs/jpegrecoverymap/tests/recoverymapmath_test.cpp
rename to libs/jpegrecoverymap/tests/gainmapmath_test.cpp
index 2369a7e..21de2e6 100644
--- a/libs/jpegrecoverymap/tests/recoverymapmath_test.cpp
+++ b/libs/jpegrecoverymap/tests/gainmapmath_test.cpp
@@ -17,14 +17,14 @@
#include <cmath>
#include <gtest/gtest.h>
#include <gmock/gmock.h>
-#include <jpegrecoverymap/recoverymapmath.h>
+#include <jpegrecoverymap/gainmapmath.h>
namespace android::jpegrecoverymap {
-class RecoveryMapMathTest : public testing::Test {
+class GainMapMathTest : public testing::Test {
public:
- RecoveryMapMathTest();
- ~RecoveryMapMathTest();
+ GainMapMathTest();
+ ~GainMapMathTest();
float ComparisonEpsilon() { return 1e-4f; }
float LuminanceEpsilon() { return 1e-2f; }
@@ -88,10 +88,10 @@
return luminance_scaled * scale_factor;
}
- Color Recover(Color yuv_gamma, float recovery, jr_metadata_ptr metadata) {
+ Color Recover(Color yuv_gamma, float gain, jr_metadata_ptr metadata) {
Color rgb_gamma = srgbYuvToRgb(yuv_gamma);
Color rgb = srgbInvOetf(rgb_gamma);
- return applyRecovery(rgb, recovery, metadata);
+ return applyGain(rgb, gain, metadata);
}
jpegr_uncompressed_struct Yuv420Image() {
@@ -193,11 +193,11 @@
virtual void TearDown();
};
-RecoveryMapMathTest::RecoveryMapMathTest() {}
-RecoveryMapMathTest::~RecoveryMapMathTest() {}
+GainMapMathTest::GainMapMathTest() {}
+GainMapMathTest::~GainMapMathTest() {}
-void RecoveryMapMathTest::SetUp() {}
-void RecoveryMapMathTest::TearDown() {}
+void GainMapMathTest::SetUp() {}
+void GainMapMathTest::TearDown() {}
#define EXPECT_RGB_EQ(e1, e2) \
EXPECT_FLOAT_EQ((e1).r, (e2).r); \
@@ -231,7 +231,7 @@
// TODO: a bunch of these tests can be parameterized.
-TEST_F(RecoveryMapMathTest, ColorConstruct) {
+TEST_F(GainMapMathTest, ColorConstruct) {
Color e1 = {{{ 0.1f, 0.2f, 0.3f }}};
EXPECT_FLOAT_EQ(e1.r, 0.1f);
@@ -243,7 +243,7 @@
EXPECT_FLOAT_EQ(e1.v, 0.3f);
}
-TEST_F(RecoveryMapMathTest, ColorAddColor) {
+TEST_F(GainMapMathTest, ColorAddColor) {
Color e1 = {{{ 0.1f, 0.2f, 0.3f }}};
Color e2 = e1 + e1;
@@ -257,7 +257,7 @@
EXPECT_FLOAT_EQ(e2.b, e1.b * 3.0f);
}
-TEST_F(RecoveryMapMathTest, ColorAddFloat) {
+TEST_F(GainMapMathTest, ColorAddFloat) {
Color e1 = {{{ 0.1f, 0.2f, 0.3f }}};
Color e2 = e1 + 0.1f;
@@ -271,7 +271,7 @@
EXPECT_FLOAT_EQ(e2.b, e1.b + 0.2f);
}
-TEST_F(RecoveryMapMathTest, ColorSubtractColor) {
+TEST_F(GainMapMathTest, ColorSubtractColor) {
Color e1 = {{{ 0.1f, 0.2f, 0.3f }}};
Color e2 = e1 - e1;
@@ -285,7 +285,7 @@
EXPECT_FLOAT_EQ(e2.b, -e1.b);
}
-TEST_F(RecoveryMapMathTest, ColorSubtractFloat) {
+TEST_F(GainMapMathTest, ColorSubtractFloat) {
Color e1 = {{{ 0.1f, 0.2f, 0.3f }}};
Color e2 = e1 - 0.1f;
@@ -299,7 +299,7 @@
EXPECT_FLOAT_EQ(e2.b, e1.b - 0.2f);
}
-TEST_F(RecoveryMapMathTest, ColorMultiplyFloat) {
+TEST_F(GainMapMathTest, ColorMultiplyFloat) {
Color e1 = {{{ 0.1f, 0.2f, 0.3f }}};
Color e2 = e1 * 2.0f;
@@ -313,7 +313,7 @@
EXPECT_FLOAT_EQ(e2.b, e1.b * 4.0f);
}
-TEST_F(RecoveryMapMathTest, ColorDivideFloat) {
+TEST_F(GainMapMathTest, ColorDivideFloat) {
Color e1 = {{{ 0.1f, 0.2f, 0.3f }}};
Color e2 = e1 / 2.0f;
@@ -327,7 +327,7 @@
EXPECT_FLOAT_EQ(e2.b, e1.b / 4.0f);
}
-TEST_F(RecoveryMapMathTest, SrgbLuminance) {
+TEST_F(GainMapMathTest, SrgbLuminance) {
EXPECT_FLOAT_EQ(srgbLuminance(RgbBlack()), 0.0f);
EXPECT_FLOAT_EQ(srgbLuminance(RgbWhite()), 1.0f);
EXPECT_FLOAT_EQ(srgbLuminance(RgbRed()), 0.2126f);
@@ -335,7 +335,7 @@
EXPECT_FLOAT_EQ(srgbLuminance(RgbBlue()), 0.0722f);
}
-TEST_F(RecoveryMapMathTest, SrgbYuvToRgb) {
+TEST_F(GainMapMathTest, SrgbYuvToRgb) {
Color rgb_black = srgbYuvToRgb(YuvBlack());
EXPECT_RGB_NEAR(rgb_black, RgbBlack());
@@ -352,7 +352,7 @@
EXPECT_RGB_NEAR(rgb_b, RgbBlue());
}
-TEST_F(RecoveryMapMathTest, SrgbRgbToYuv) {
+TEST_F(GainMapMathTest, SrgbRgbToYuv) {
Color yuv_black = srgbRgbToYuv(RgbBlack());
EXPECT_YUV_NEAR(yuv_black, YuvBlack());
@@ -369,7 +369,7 @@
EXPECT_YUV_NEAR(yuv_b, SrgbYuvBlue());
}
-TEST_F(RecoveryMapMathTest, SrgbRgbYuvRoundtrip) {
+TEST_F(GainMapMathTest, SrgbRgbYuvRoundtrip) {
Color rgb_black = srgbYuvToRgb(srgbRgbToYuv(RgbBlack()));
EXPECT_RGB_NEAR(rgb_black, RgbBlack());
@@ -386,7 +386,7 @@
EXPECT_RGB_NEAR(rgb_b, RgbBlue());
}
-TEST_F(RecoveryMapMathTest, SrgbTransferFunction) {
+TEST_F(GainMapMathTest, SrgbTransferFunction) {
EXPECT_FLOAT_EQ(srgbInvOetf(0.0f), 0.0f);
EXPECT_NEAR(srgbInvOetf(0.02f), 0.00154f, ComparisonEpsilon());
EXPECT_NEAR(srgbInvOetf(0.04045f), 0.00313f, ComparisonEpsilon());
@@ -394,7 +394,7 @@
EXPECT_FLOAT_EQ(srgbInvOetf(1.0f), 1.0f);
}
-TEST_F(RecoveryMapMathTest, P3Luminance) {
+TEST_F(GainMapMathTest, P3Luminance) {
EXPECT_FLOAT_EQ(p3Luminance(RgbBlack()), 0.0f);
EXPECT_FLOAT_EQ(p3Luminance(RgbWhite()), 1.0f);
EXPECT_FLOAT_EQ(p3Luminance(RgbRed()), 0.20949f);
@@ -402,7 +402,7 @@
EXPECT_FLOAT_EQ(p3Luminance(RgbBlue()), 0.06891f);
}
-TEST_F(RecoveryMapMathTest, Bt2100Luminance) {
+TEST_F(GainMapMathTest, Bt2100Luminance) {
EXPECT_FLOAT_EQ(bt2100Luminance(RgbBlack()), 0.0f);
EXPECT_FLOAT_EQ(bt2100Luminance(RgbWhite()), 1.0f);
EXPECT_FLOAT_EQ(bt2100Luminance(RgbRed()), 0.2627f);
@@ -410,7 +410,7 @@
EXPECT_FLOAT_EQ(bt2100Luminance(RgbBlue()), 0.0593f);
}
-TEST_F(RecoveryMapMathTest, Bt2100YuvToRgb) {
+TEST_F(GainMapMathTest, Bt2100YuvToRgb) {
Color rgb_black = bt2100YuvToRgb(YuvBlack());
EXPECT_RGB_NEAR(rgb_black, RgbBlack());
@@ -427,7 +427,7 @@
EXPECT_RGB_NEAR(rgb_b, RgbBlue());
}
-TEST_F(RecoveryMapMathTest, Bt2100RgbToYuv) {
+TEST_F(GainMapMathTest, Bt2100RgbToYuv) {
Color yuv_black = bt2100RgbToYuv(RgbBlack());
EXPECT_YUV_NEAR(yuv_black, YuvBlack());
@@ -444,7 +444,7 @@
EXPECT_YUV_NEAR(yuv_b, Bt2100YuvBlue());
}
-TEST_F(RecoveryMapMathTest, Bt2100RgbYuvRoundtrip) {
+TEST_F(GainMapMathTest, Bt2100RgbYuvRoundtrip) {
Color rgb_black = bt2100YuvToRgb(bt2100RgbToYuv(RgbBlack()));
EXPECT_RGB_NEAR(rgb_black, RgbBlack());
@@ -461,7 +461,7 @@
EXPECT_RGB_NEAR(rgb_b, RgbBlue());
}
-TEST_F(RecoveryMapMathTest, HlgOetf) {
+TEST_F(GainMapMathTest, HlgOetf) {
EXPECT_FLOAT_EQ(hlgOetf(0.0f), 0.0f);
EXPECT_NEAR(hlgOetf(0.04167f), 0.35357f, ComparisonEpsilon());
EXPECT_NEAR(hlgOetf(0.08333f), 0.5f, ComparisonEpsilon());
@@ -473,7 +473,7 @@
EXPECT_RGB_NEAR(hlgOetf(e), e_gamma);
}
-TEST_F(RecoveryMapMathTest, HlgInvOetf) {
+TEST_F(GainMapMathTest, HlgInvOetf) {
EXPECT_FLOAT_EQ(hlgInvOetf(0.0f), 0.0f);
EXPECT_NEAR(hlgInvOetf(0.25f), 0.02083f, ComparisonEpsilon());
EXPECT_NEAR(hlgInvOetf(0.5f), 0.08333f, ComparisonEpsilon());
@@ -485,7 +485,7 @@
EXPECT_RGB_NEAR(hlgInvOetf(e_gamma), e);
}
-TEST_F(RecoveryMapMathTest, HlgTransferFunctionRoundtrip) {
+TEST_F(GainMapMathTest, HlgTransferFunctionRoundtrip) {
EXPECT_FLOAT_EQ(hlgInvOetf(hlgOetf(0.0f)), 0.0f);
EXPECT_NEAR(hlgInvOetf(hlgOetf(0.04167f)), 0.04167f, ComparisonEpsilon());
EXPECT_NEAR(hlgInvOetf(hlgOetf(0.08333f)), 0.08333f, ComparisonEpsilon());
@@ -493,7 +493,7 @@
EXPECT_FLOAT_EQ(hlgInvOetf(hlgOetf(1.0f)), 1.0f);
}
-TEST_F(RecoveryMapMathTest, PqOetf) {
+TEST_F(GainMapMathTest, PqOetf) {
EXPECT_FLOAT_EQ(pqOetf(0.0f), 0.0f);
EXPECT_NEAR(pqOetf(0.01f), 0.50808f, ComparisonEpsilon());
EXPECT_NEAR(pqOetf(0.5f), 0.92655f, ComparisonEpsilon());
@@ -505,7 +505,7 @@
EXPECT_RGB_NEAR(pqOetf(e), e_gamma);
}
-TEST_F(RecoveryMapMathTest, PqInvOetf) {
+TEST_F(GainMapMathTest, PqInvOetf) {
EXPECT_FLOAT_EQ(pqInvOetf(0.0f), 0.0f);
EXPECT_NEAR(pqInvOetf(0.01f), 2.31017e-7f, ComparisonEpsilon());
EXPECT_NEAR(pqInvOetf(0.5f), 0.00922f, ComparisonEpsilon());
@@ -517,99 +517,99 @@
EXPECT_RGB_NEAR(pqInvOetf(e_gamma), e);
}
-TEST_F(RecoveryMapMathTest, PqInvOetfLUT) {
+TEST_F(GainMapMathTest, PqInvOetfLUT) {
for (int idx = 0; idx < kPqInvOETFNumEntries; idx++) {
float value = static_cast<float>(idx) / static_cast<float>(kPqInvOETFNumEntries - 1);
EXPECT_FLOAT_EQ(pqInvOetf(value), pqInvOetfLUT(value));
}
}
-TEST_F(RecoveryMapMathTest, HlgInvOetfLUT) {
+TEST_F(GainMapMathTest, HlgInvOetfLUT) {
for (int idx = 0; idx < kHlgInvOETFNumEntries; idx++) {
float value = static_cast<float>(idx) / static_cast<float>(kHlgInvOETFNumEntries - 1);
EXPECT_FLOAT_EQ(hlgInvOetf(value), hlgInvOetfLUT(value));
}
}
-TEST_F(RecoveryMapMathTest, pqOetfLUT) {
+TEST_F(GainMapMathTest, pqOetfLUT) {
for (int idx = 0; idx < kPqOETFNumEntries; idx++) {
float value = static_cast<float>(idx) / static_cast<float>(kPqOETFNumEntries - 1);
EXPECT_FLOAT_EQ(pqOetf(value), pqOetfLUT(value));
}
}
-TEST_F(RecoveryMapMathTest, hlgOetfLUT) {
+TEST_F(GainMapMathTest, hlgOetfLUT) {
for (int idx = 0; idx < kHlgOETFNumEntries; idx++) {
float value = static_cast<float>(idx) / static_cast<float>(kHlgOETFNumEntries - 1);
EXPECT_FLOAT_EQ(hlgOetf(value), hlgOetfLUT(value));
}
}
-TEST_F(RecoveryMapMathTest, srgbInvOetfLUT) {
+TEST_F(GainMapMathTest, srgbInvOetfLUT) {
for (int idx = 0; idx < kSrgbInvOETFNumEntries; idx++) {
float value = static_cast<float>(idx) / static_cast<float>(kSrgbInvOETFNumEntries - 1);
EXPECT_FLOAT_EQ(srgbInvOetf(value), srgbInvOetfLUT(value));
}
}
-TEST_F(RecoveryMapMathTest, applyRecoveryLUT) {
+TEST_F(GainMapMathTest, applyGainLUT) {
for (int boost = 1; boost <= 10; boost++) {
jpegr_metadata_struct metadata = { .maxContentBoost = static_cast<float>(boost),
.minContentBoost = 1.0f / static_cast<float>(boost) };
- RecoveryLUT recoveryLUT(&metadata);
- RecoveryLUT recoveryLUTWithBoost(&metadata, metadata.maxContentBoost);
- for (int idx = 0; idx < kRecoveryFactorNumEntries; idx++) {
- float value = static_cast<float>(idx) / static_cast<float>(kRecoveryFactorNumEntries - 1);
- EXPECT_RGB_NEAR(applyRecovery(RgbBlack(), value, &metadata),
- applyRecoveryLUT(RgbBlack(), value, recoveryLUT));
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), value, &metadata),
- applyRecoveryLUT(RgbWhite(), value, recoveryLUT));
- EXPECT_RGB_NEAR(applyRecovery(RgbRed(), value, &metadata),
- applyRecoveryLUT(RgbRed(), value, recoveryLUT));
- EXPECT_RGB_NEAR(applyRecovery(RgbGreen(), value, &metadata),
- applyRecoveryLUT(RgbGreen(), value, recoveryLUT));
- EXPECT_RGB_NEAR(applyRecovery(RgbBlue(), value, &metadata),
- applyRecoveryLUT(RgbBlue(), value, recoveryLUT));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbBlack(), value, recoveryLUT),
- applyRecoveryLUT(RgbBlack(), value, recoveryLUTWithBoost));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbWhite(), value, recoveryLUT),
- applyRecoveryLUT(RgbWhite(), value, recoveryLUTWithBoost));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbRed(), value, recoveryLUT),
- applyRecoveryLUT(RgbRed(), value, recoveryLUTWithBoost));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbGreen(), value, recoveryLUT),
- applyRecoveryLUT(RgbGreen(), value, recoveryLUTWithBoost));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbBlue(), value, recoveryLUT),
- applyRecoveryLUT(RgbBlue(), value, recoveryLUTWithBoost));
+ GainLUT gainLUT(&metadata);
+ GainLUT gainLUTWithBoost(&metadata, metadata.maxContentBoost);
+ for (int idx = 0; idx < kGainFactorNumEntries; idx++) {
+ float value = static_cast<float>(idx) / static_cast<float>(kGainFactorNumEntries - 1);
+ EXPECT_RGB_NEAR(applyGain(RgbBlack(), value, &metadata),
+ applyGainLUT(RgbBlack(), value, gainLUT));
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), value, &metadata),
+ applyGainLUT(RgbWhite(), value, gainLUT));
+ EXPECT_RGB_NEAR(applyGain(RgbRed(), value, &metadata),
+ applyGainLUT(RgbRed(), value, gainLUT));
+ EXPECT_RGB_NEAR(applyGain(RgbGreen(), value, &metadata),
+ applyGainLUT(RgbGreen(), value, gainLUT));
+ EXPECT_RGB_NEAR(applyGain(RgbBlue(), value, &metadata),
+ applyGainLUT(RgbBlue(), value, gainLUT));
+ EXPECT_RGB_EQ(applyGainLUT(RgbBlack(), value, gainLUT),
+ applyGainLUT(RgbBlack(), value, gainLUTWithBoost));
+ EXPECT_RGB_EQ(applyGainLUT(RgbWhite(), value, gainLUT),
+ applyGainLUT(RgbWhite(), value, gainLUTWithBoost));
+ EXPECT_RGB_EQ(applyGainLUT(RgbRed(), value, gainLUT),
+ applyGainLUT(RgbRed(), value, gainLUTWithBoost));
+ EXPECT_RGB_EQ(applyGainLUT(RgbGreen(), value, gainLUT),
+ applyGainLUT(RgbGreen(), value, gainLUTWithBoost));
+ EXPECT_RGB_EQ(applyGainLUT(RgbBlue(), value, gainLUT),
+ applyGainLUT(RgbBlue(), value, gainLUTWithBoost));
}
}
for (int boost = 1; boost <= 10; boost++) {
jpegr_metadata_struct metadata = { .maxContentBoost = static_cast<float>(boost),
.minContentBoost = 1.0f };
- RecoveryLUT recoveryLUT(&metadata);
- RecoveryLUT recoveryLUTWithBoost(&metadata, metadata.maxContentBoost);
- for (int idx = 0; idx < kRecoveryFactorNumEntries; idx++) {
- float value = static_cast<float>(idx) / static_cast<float>(kRecoveryFactorNumEntries - 1);
- EXPECT_RGB_NEAR(applyRecovery(RgbBlack(), value, &metadata),
- applyRecoveryLUT(RgbBlack(), value, recoveryLUT));
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), value, &metadata),
- applyRecoveryLUT(RgbWhite(), value, recoveryLUT));
- EXPECT_RGB_NEAR(applyRecovery(RgbRed(), value, &metadata),
- applyRecoveryLUT(RgbRed(), value, recoveryLUT));
- EXPECT_RGB_NEAR(applyRecovery(RgbGreen(), value, &metadata),
- applyRecoveryLUT(RgbGreen(), value, recoveryLUT));
- EXPECT_RGB_NEAR(applyRecovery(RgbBlue(), value, &metadata),
- applyRecoveryLUT(RgbBlue(), value, recoveryLUT));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbBlack(), value, recoveryLUT),
- applyRecoveryLUT(RgbBlack(), value, recoveryLUTWithBoost));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbWhite(), value, recoveryLUT),
- applyRecoveryLUT(RgbWhite(), value, recoveryLUTWithBoost));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbRed(), value, recoveryLUT),
- applyRecoveryLUT(RgbRed(), value, recoveryLUTWithBoost));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbGreen(), value, recoveryLUT),
- applyRecoveryLUT(RgbGreen(), value, recoveryLUTWithBoost));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbBlue(), value, recoveryLUT),
- applyRecoveryLUT(RgbBlue(), value, recoveryLUTWithBoost));
+ GainLUT gainLUT(&metadata);
+ GainLUT gainLUTWithBoost(&metadata, metadata.maxContentBoost);
+ for (int idx = 0; idx < kGainFactorNumEntries; idx++) {
+ float value = static_cast<float>(idx) / static_cast<float>(kGainFactorNumEntries - 1);
+ EXPECT_RGB_NEAR(applyGain(RgbBlack(), value, &metadata),
+ applyGainLUT(RgbBlack(), value, gainLUT));
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), value, &metadata),
+ applyGainLUT(RgbWhite(), value, gainLUT));
+ EXPECT_RGB_NEAR(applyGain(RgbRed(), value, &metadata),
+ applyGainLUT(RgbRed(), value, gainLUT));
+ EXPECT_RGB_NEAR(applyGain(RgbGreen(), value, &metadata),
+ applyGainLUT(RgbGreen(), value, gainLUT));
+ EXPECT_RGB_NEAR(applyGain(RgbBlue(), value, &metadata),
+ applyGainLUT(RgbBlue(), value, gainLUT));
+ EXPECT_RGB_EQ(applyGainLUT(RgbBlack(), value, gainLUT),
+ applyGainLUT(RgbBlack(), value, gainLUTWithBoost));
+ EXPECT_RGB_EQ(applyGainLUT(RgbWhite(), value, gainLUT),
+ applyGainLUT(RgbWhite(), value, gainLUTWithBoost));
+ EXPECT_RGB_EQ(applyGainLUT(RgbRed(), value, gainLUT),
+ applyGainLUT(RgbRed(), value, gainLUTWithBoost));
+ EXPECT_RGB_EQ(applyGainLUT(RgbGreen(), value, gainLUT),
+ applyGainLUT(RgbGreen(), value, gainLUTWithBoost));
+ EXPECT_RGB_EQ(applyGainLUT(RgbBlue(), value, gainLUT),
+ applyGainLUT(RgbBlue(), value, gainLUTWithBoost));
}
}
@@ -617,35 +617,35 @@
jpegr_metadata_struct metadata = { .maxContentBoost = static_cast<float>(boost),
.minContentBoost = 1.0f / pow(static_cast<float>(boost),
1.0f / 3.0f) };
- RecoveryLUT recoveryLUT(&metadata);
- RecoveryLUT recoveryLUTWithBoost(&metadata, metadata.maxContentBoost);
- for (int idx = 0; idx < kRecoveryFactorNumEntries; idx++) {
- float value = static_cast<float>(idx) / static_cast<float>(kRecoveryFactorNumEntries - 1);
- EXPECT_RGB_NEAR(applyRecovery(RgbBlack(), value, &metadata),
- applyRecoveryLUT(RgbBlack(), value, recoveryLUT));
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), value, &metadata),
- applyRecoveryLUT(RgbWhite(), value, recoveryLUT));
- EXPECT_RGB_NEAR(applyRecovery(RgbRed(), value, &metadata),
- applyRecoveryLUT(RgbRed(), value, recoveryLUT));
- EXPECT_RGB_NEAR(applyRecovery(RgbGreen(), value, &metadata),
- applyRecoveryLUT(RgbGreen(), value, recoveryLUT));
- EXPECT_RGB_NEAR(applyRecovery(RgbBlue(), value, &metadata),
- applyRecoveryLUT(RgbBlue(), value, recoveryLUT));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbBlack(), value, recoveryLUT),
- applyRecoveryLUT(RgbBlack(), value, recoveryLUTWithBoost));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbWhite(), value, recoveryLUT),
- applyRecoveryLUT(RgbWhite(), value, recoveryLUTWithBoost));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbRed(), value, recoveryLUT),
- applyRecoveryLUT(RgbRed(), value, recoveryLUTWithBoost));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbGreen(), value, recoveryLUT),
- applyRecoveryLUT(RgbGreen(), value, recoveryLUTWithBoost));
- EXPECT_RGB_EQ(applyRecoveryLUT(RgbBlue(), value, recoveryLUT),
- applyRecoveryLUT(RgbBlue(), value, recoveryLUTWithBoost));
+ GainLUT gainLUT(&metadata);
+ GainLUT gainLUTWithBoost(&metadata, metadata.maxContentBoost);
+ for (int idx = 0; idx < kGainFactorNumEntries; idx++) {
+ float value = static_cast<float>(idx) / static_cast<float>(kGainFactorNumEntries - 1);
+ EXPECT_RGB_NEAR(applyGain(RgbBlack(), value, &metadata),
+ applyGainLUT(RgbBlack(), value, gainLUT));
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), value, &metadata),
+ applyGainLUT(RgbWhite(), value, gainLUT));
+ EXPECT_RGB_NEAR(applyGain(RgbRed(), value, &metadata),
+ applyGainLUT(RgbRed(), value, gainLUT));
+ EXPECT_RGB_NEAR(applyGain(RgbGreen(), value, &metadata),
+ applyGainLUT(RgbGreen(), value, gainLUT));
+ EXPECT_RGB_NEAR(applyGain(RgbBlue(), value, &metadata),
+ applyGainLUT(RgbBlue(), value, gainLUT));
+ EXPECT_RGB_EQ(applyGainLUT(RgbBlack(), value, gainLUT),
+ applyGainLUT(RgbBlack(), value, gainLUTWithBoost));
+ EXPECT_RGB_EQ(applyGainLUT(RgbWhite(), value, gainLUT),
+ applyGainLUT(RgbWhite(), value, gainLUTWithBoost));
+ EXPECT_RGB_EQ(applyGainLUT(RgbRed(), value, gainLUT),
+ applyGainLUT(RgbRed(), value, gainLUTWithBoost));
+ EXPECT_RGB_EQ(applyGainLUT(RgbGreen(), value, gainLUT),
+ applyGainLUT(RgbGreen(), value, gainLUTWithBoost));
+ EXPECT_RGB_EQ(applyGainLUT(RgbBlue(), value, gainLUT),
+ applyGainLUT(RgbBlue(), value, gainLUTWithBoost));
}
}
}
-TEST_F(RecoveryMapMathTest, PqTransferFunctionRoundtrip) {
+TEST_F(GainMapMathTest, PqTransferFunctionRoundtrip) {
EXPECT_FLOAT_EQ(pqInvOetf(pqOetf(0.0f)), 0.0f);
EXPECT_NEAR(pqInvOetf(pqOetf(0.01f)), 0.01f, ComparisonEpsilon());
EXPECT_NEAR(pqInvOetf(pqOetf(0.5f)), 0.5f, ComparisonEpsilon());
@@ -653,7 +653,7 @@
EXPECT_FLOAT_EQ(pqInvOetf(pqOetf(1.0f)), 1.0f);
}
-TEST_F(RecoveryMapMathTest, ColorConversionLookup) {
+TEST_F(GainMapMathTest, ColorConversionLookup) {
EXPECT_EQ(getHdrConversionFn(JPEGR_COLORGAMUT_BT709, JPEGR_COLORGAMUT_UNSPECIFIED),
nullptr);
EXPECT_EQ(getHdrConversionFn(JPEGR_COLORGAMUT_BT709, JPEGR_COLORGAMUT_BT709),
@@ -691,139 +691,139 @@
nullptr);
}
-TEST_F(RecoveryMapMathTest, EncodeRecovery) {
+TEST_F(GainMapMathTest, EncodeGain) {
jpegr_metadata_struct metadata = { .maxContentBoost = 4.0f,
.minContentBoost = 1.0f / 4.0f };
- EXPECT_EQ(encodeRecovery(0.0f, 0.0f, &metadata), 127);
- EXPECT_EQ(encodeRecovery(0.0f, 1.0f, &metadata), 127);
- EXPECT_EQ(encodeRecovery(1.0f, 0.0f, &metadata), 0);
- EXPECT_EQ(encodeRecovery(0.5f, 0.0f, &metadata), 0);
+ EXPECT_EQ(encodeGain(0.0f, 0.0f, &metadata), 127);
+ EXPECT_EQ(encodeGain(0.0f, 1.0f, &metadata), 127);
+ EXPECT_EQ(encodeGain(1.0f, 0.0f, &metadata), 0);
+ EXPECT_EQ(encodeGain(0.5f, 0.0f, &metadata), 0);
- EXPECT_EQ(encodeRecovery(1.0f, 1.0f, &metadata), 127);
- EXPECT_EQ(encodeRecovery(1.0f, 4.0f, &metadata), 255);
- EXPECT_EQ(encodeRecovery(1.0f, 5.0f, &metadata), 255);
- EXPECT_EQ(encodeRecovery(4.0f, 1.0f, &metadata), 0);
- EXPECT_EQ(encodeRecovery(4.0f, 0.5f, &metadata), 0);
- EXPECT_EQ(encodeRecovery(1.0f, 2.0f, &metadata), 191);
- EXPECT_EQ(encodeRecovery(2.0f, 1.0f, &metadata), 63);
+ EXPECT_EQ(encodeGain(1.0f, 1.0f, &metadata), 127);
+ EXPECT_EQ(encodeGain(1.0f, 4.0f, &metadata), 255);
+ EXPECT_EQ(encodeGain(1.0f, 5.0f, &metadata), 255);
+ EXPECT_EQ(encodeGain(4.0f, 1.0f, &metadata), 0);
+ EXPECT_EQ(encodeGain(4.0f, 0.5f, &metadata), 0);
+ EXPECT_EQ(encodeGain(1.0f, 2.0f, &metadata), 191);
+ EXPECT_EQ(encodeGain(2.0f, 1.0f, &metadata), 63);
metadata.maxContentBoost = 2.0f;
metadata.minContentBoost = 1.0f / 2.0f;
- EXPECT_EQ(encodeRecovery(1.0f, 2.0f, &metadata), 255);
- EXPECT_EQ(encodeRecovery(2.0f, 1.0f, &metadata), 0);
- EXPECT_EQ(encodeRecovery(1.0f, 1.41421f, &metadata), 191);
- EXPECT_EQ(encodeRecovery(1.41421f, 1.0f, &metadata), 63);
+ EXPECT_EQ(encodeGain(1.0f, 2.0f, &metadata), 255);
+ EXPECT_EQ(encodeGain(2.0f, 1.0f, &metadata), 0);
+ EXPECT_EQ(encodeGain(1.0f, 1.41421f, &metadata), 191);
+ EXPECT_EQ(encodeGain(1.41421f, 1.0f, &metadata), 63);
metadata.maxContentBoost = 8.0f;
metadata.minContentBoost = 1.0f / 8.0f;
- EXPECT_EQ(encodeRecovery(1.0f, 8.0f, &metadata), 255);
- EXPECT_EQ(encodeRecovery(8.0f, 1.0f, &metadata), 0);
- EXPECT_EQ(encodeRecovery(1.0f, 2.82843f, &metadata), 191);
- EXPECT_EQ(encodeRecovery(2.82843f, 1.0f, &metadata), 63);
+ EXPECT_EQ(encodeGain(1.0f, 8.0f, &metadata), 255);
+ EXPECT_EQ(encodeGain(8.0f, 1.0f, &metadata), 0);
+ EXPECT_EQ(encodeGain(1.0f, 2.82843f, &metadata), 191);
+ EXPECT_EQ(encodeGain(2.82843f, 1.0f, &metadata), 63);
metadata.maxContentBoost = 8.0f;
metadata.minContentBoost = 1.0f;
- EXPECT_EQ(encodeRecovery(0.0f, 0.0f, &metadata), 0);
- EXPECT_EQ(encodeRecovery(1.0f, 0.0f, &metadata), 0);
+ EXPECT_EQ(encodeGain(0.0f, 0.0f, &metadata), 0);
+ EXPECT_EQ(encodeGain(1.0f, 0.0f, &metadata), 0);
- EXPECT_EQ(encodeRecovery(1.0f, 1.0f, &metadata), 0);
- EXPECT_EQ(encodeRecovery(1.0f, 8.0f, &metadata), 255);
- EXPECT_EQ(encodeRecovery(1.0f, 4.0f, &metadata), 170);
- EXPECT_EQ(encodeRecovery(1.0f, 2.0f, &metadata), 85);
+ EXPECT_EQ(encodeGain(1.0f, 1.0f, &metadata), 0);
+ EXPECT_EQ(encodeGain(1.0f, 8.0f, &metadata), 255);
+ EXPECT_EQ(encodeGain(1.0f, 4.0f, &metadata), 170);
+ EXPECT_EQ(encodeGain(1.0f, 2.0f, &metadata), 85);
metadata.maxContentBoost = 8.0f;
metadata.minContentBoost = 0.5f;
- EXPECT_EQ(encodeRecovery(0.0f, 0.0f, &metadata), 63);
- EXPECT_EQ(encodeRecovery(1.0f, 0.0f, &metadata), 0);
+ EXPECT_EQ(encodeGain(0.0f, 0.0f, &metadata), 63);
+ EXPECT_EQ(encodeGain(1.0f, 0.0f, &metadata), 0);
- EXPECT_EQ(encodeRecovery(1.0f, 1.0f, &metadata), 63);
- EXPECT_EQ(encodeRecovery(1.0f, 8.0f, &metadata), 255);
- EXPECT_EQ(encodeRecovery(1.0f, 4.0f, &metadata), 191);
- EXPECT_EQ(encodeRecovery(1.0f, 2.0f, &metadata), 127);
- EXPECT_EQ(encodeRecovery(1.0f, 0.7071f, &metadata), 31);
- EXPECT_EQ(encodeRecovery(1.0f, 0.5f, &metadata), 0);
+ EXPECT_EQ(encodeGain(1.0f, 1.0f, &metadata), 63);
+ EXPECT_EQ(encodeGain(1.0f, 8.0f, &metadata), 255);
+ EXPECT_EQ(encodeGain(1.0f, 4.0f, &metadata), 191);
+ EXPECT_EQ(encodeGain(1.0f, 2.0f, &metadata), 127);
+ EXPECT_EQ(encodeGain(1.0f, 0.7071f, &metadata), 31);
+ EXPECT_EQ(encodeGain(1.0f, 0.5f, &metadata), 0);
}
-TEST_F(RecoveryMapMathTest, ApplyRecovery) {
+TEST_F(GainMapMathTest, ApplyGain) {
jpegr_metadata_struct metadata = { .maxContentBoost = 4.0f,
.minContentBoost = 1.0f / 4.0f };
float displayBoost = metadata.maxContentBoost;
- EXPECT_RGB_NEAR(applyRecovery(RgbBlack(), 0.0f, &metadata), RgbBlack());
- EXPECT_RGB_NEAR(applyRecovery(RgbBlack(), 0.5f, &metadata), RgbBlack());
- EXPECT_RGB_NEAR(applyRecovery(RgbBlack(), 1.0f, &metadata), RgbBlack());
+ EXPECT_RGB_NEAR(applyGain(RgbBlack(), 0.0f, &metadata), RgbBlack());
+ EXPECT_RGB_NEAR(applyGain(RgbBlack(), 0.5f, &metadata), RgbBlack());
+ EXPECT_RGB_NEAR(applyGain(RgbBlack(), 1.0f, &metadata), RgbBlack());
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.0f, &metadata), RgbWhite() / 4.0f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.25f, &metadata), RgbWhite() / 2.0f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.5f, &metadata), RgbWhite());
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.75f, &metadata), RgbWhite() * 2.0f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 1.0f, &metadata), RgbWhite() * 4.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.0f, &metadata), RgbWhite() / 4.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.25f, &metadata), RgbWhite() / 2.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.5f, &metadata), RgbWhite());
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.75f, &metadata), RgbWhite() * 2.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 1.0f, &metadata), RgbWhite() * 4.0f);
metadata.maxContentBoost = 2.0f;
metadata.minContentBoost = 1.0f / 2.0f;
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.0f, &metadata), RgbWhite() / 2.0f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.25f, &metadata), RgbWhite() / 1.41421f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.5f, &metadata), RgbWhite());
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.75f, &metadata), RgbWhite() * 1.41421f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 1.0f, &metadata), RgbWhite() * 2.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.0f, &metadata), RgbWhite() / 2.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.25f, &metadata), RgbWhite() / 1.41421f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.5f, &metadata), RgbWhite());
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.75f, &metadata), RgbWhite() * 1.41421f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 1.0f, &metadata), RgbWhite() * 2.0f);
metadata.maxContentBoost = 8.0f;
metadata.minContentBoost = 1.0f / 8.0f;
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.0f, &metadata), RgbWhite() / 8.0f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.25f, &metadata), RgbWhite() / 2.82843f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.5f, &metadata), RgbWhite());
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.75f, &metadata), RgbWhite() * 2.82843f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 1.0f, &metadata), RgbWhite() * 8.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.0f, &metadata), RgbWhite() / 8.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.25f, &metadata), RgbWhite() / 2.82843f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.5f, &metadata), RgbWhite());
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.75f, &metadata), RgbWhite() * 2.82843f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 1.0f, &metadata), RgbWhite() * 8.0f);
metadata.maxContentBoost = 8.0f;
metadata.minContentBoost = 1.0f;
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.0f, &metadata), RgbWhite());
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 1.0f / 3.0f, &metadata), RgbWhite() * 2.0f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 2.0f / 3.0f, &metadata), RgbWhite() * 4.0f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 1.0f, &metadata), RgbWhite() * 8.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.0f, &metadata), RgbWhite());
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 1.0f / 3.0f, &metadata), RgbWhite() * 2.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 2.0f / 3.0f, &metadata), RgbWhite() * 4.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 1.0f, &metadata), RgbWhite() * 8.0f);
metadata.maxContentBoost = 8.0f;
metadata.minContentBoost = 0.5f;
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.0f, &metadata), RgbWhite() / 2.0f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.25f, &metadata), RgbWhite());
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.5f, &metadata), RgbWhite() * 2.0f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 0.75f, &metadata), RgbWhite() * 4.0f);
- EXPECT_RGB_NEAR(applyRecovery(RgbWhite(), 1.0f, &metadata), RgbWhite() * 8.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.0f, &metadata), RgbWhite() / 2.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.25f, &metadata), RgbWhite());
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.5f, &metadata), RgbWhite() * 2.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 0.75f, &metadata), RgbWhite() * 4.0f);
+ EXPECT_RGB_NEAR(applyGain(RgbWhite(), 1.0f, &metadata), RgbWhite() * 8.0f);
Color e = {{{ 0.0f, 0.5f, 1.0f }}};
metadata.maxContentBoost = 4.0f;
metadata.minContentBoost = 1.0f / 4.0f;
- EXPECT_RGB_NEAR(applyRecovery(e, 0.0f, &metadata), e / 4.0f);
- EXPECT_RGB_NEAR(applyRecovery(e, 0.25f, &metadata), e / 2.0f);
- EXPECT_RGB_NEAR(applyRecovery(e, 0.5f, &metadata), e);
- EXPECT_RGB_NEAR(applyRecovery(e, 0.75f, &metadata), e * 2.0f);
- EXPECT_RGB_NEAR(applyRecovery(e, 1.0f, &metadata), e * 4.0f);
+ EXPECT_RGB_NEAR(applyGain(e, 0.0f, &metadata), e / 4.0f);
+ EXPECT_RGB_NEAR(applyGain(e, 0.25f, &metadata), e / 2.0f);
+ EXPECT_RGB_NEAR(applyGain(e, 0.5f, &metadata), e);
+ EXPECT_RGB_NEAR(applyGain(e, 0.75f, &metadata), e * 2.0f);
+ EXPECT_RGB_NEAR(applyGain(e, 1.0f, &metadata), e * 4.0f);
- EXPECT_RGB_EQ(applyRecovery(RgbBlack(), 1.0f, &metadata),
- applyRecovery(RgbBlack(), 1.0f, &metadata, displayBoost));
- EXPECT_RGB_EQ(applyRecovery(RgbWhite(), 1.0f, &metadata),
- applyRecovery(RgbWhite(), 1.0f, &metadata, displayBoost));
- EXPECT_RGB_EQ(applyRecovery(RgbRed(), 1.0f, &metadata),
- applyRecovery(RgbRed(), 1.0f, &metadata, displayBoost));
- EXPECT_RGB_EQ(applyRecovery(RgbGreen(), 1.0f, &metadata),
- applyRecovery(RgbGreen(), 1.0f, &metadata, displayBoost));
- EXPECT_RGB_EQ(applyRecovery(RgbBlue(), 1.0f, &metadata),
- applyRecovery(RgbBlue(), 1.0f, &metadata, displayBoost));
- EXPECT_RGB_EQ(applyRecovery(e, 1.0f, &metadata),
- applyRecovery(e, 1.0f, &metadata, displayBoost));
+ EXPECT_RGB_EQ(applyGain(RgbBlack(), 1.0f, &metadata),
+ applyGain(RgbBlack(), 1.0f, &metadata, displayBoost));
+ EXPECT_RGB_EQ(applyGain(RgbWhite(), 1.0f, &metadata),
+ applyGain(RgbWhite(), 1.0f, &metadata, displayBoost));
+ EXPECT_RGB_EQ(applyGain(RgbRed(), 1.0f, &metadata),
+ applyGain(RgbRed(), 1.0f, &metadata, displayBoost));
+ EXPECT_RGB_EQ(applyGain(RgbGreen(), 1.0f, &metadata),
+ applyGain(RgbGreen(), 1.0f, &metadata, displayBoost));
+ EXPECT_RGB_EQ(applyGain(RgbBlue(), 1.0f, &metadata),
+ applyGain(RgbBlue(), 1.0f, &metadata, displayBoost));
+ EXPECT_RGB_EQ(applyGain(e, 1.0f, &metadata),
+ applyGain(e, 1.0f, &metadata, displayBoost));
}
-TEST_F(RecoveryMapMathTest, GetYuv420Pixel) {
+TEST_F(GainMapMathTest, GetYuv420Pixel) {
jpegr_uncompressed_struct image = Yuv420Image();
Color (*colors)[4] = Yuv420Colors();
@@ -834,7 +834,7 @@
}
}
-TEST_F(RecoveryMapMathTest, GetP010Pixel) {
+TEST_F(GainMapMathTest, GetP010Pixel) {
jpegr_uncompressed_struct image = P010Image();
Color (*colors)[4] = P010Colors();
@@ -845,7 +845,7 @@
}
}
-TEST_F(RecoveryMapMathTest, SampleYuv420) {
+TEST_F(GainMapMathTest, SampleYuv420) {
jpegr_uncompressed_struct image = Yuv420Image();
Color (*colors)[4] = Yuv420Colors();
@@ -871,7 +871,7 @@
}
}
-TEST_F(RecoveryMapMathTest, SampleP010) {
+TEST_F(GainMapMathTest, SampleP010) {
jpegr_uncompressed_struct image = P010Image();
Color (*colors)[4] = P010Colors();
@@ -897,7 +897,7 @@
}
}
-TEST_F(RecoveryMapMathTest, SampleMap) {
+TEST_F(GainMapMathTest, SampleMap) {
jpegr_uncompressed_struct image = MapImage();
float (*values)[4] = MapValues();
@@ -937,7 +937,7 @@
}
}
-TEST_F(RecoveryMapMathTest, ColorToRgba1010102) {
+TEST_F(GainMapMathTest, ColorToRgba1010102) {
EXPECT_EQ(colorToRgba1010102(RgbBlack()), 0x3 << 30);
EXPECT_EQ(colorToRgba1010102(RgbWhite()), 0xFFFFFFFF);
EXPECT_EQ(colorToRgba1010102(RgbRed()), 0x3 << 30 | 0x3ff);
@@ -952,7 +952,7 @@
| static_cast<uint32_t>(0.3f * static_cast<float>(0x3ff)) << 20);
}
-TEST_F(RecoveryMapMathTest, ColorToRgbaF16) {
+TEST_F(GainMapMathTest, ColorToRgbaF16) {
EXPECT_EQ(colorToRgbaF16(RgbBlack()), ((uint64_t) 0x3C00) << 48);
EXPECT_EQ(colorToRgbaF16(RgbWhite()), 0x3C003C003C003C00);
EXPECT_EQ(colorToRgbaF16(RgbRed()), (((uint64_t) 0x3C00) << 48) | ((uint64_t) 0x3C00));
@@ -963,7 +963,7 @@
EXPECT_EQ(colorToRgbaF16(e_gamma), 0x3C0034CD32662E66);
}
-TEST_F(RecoveryMapMathTest, Float32ToFloat16) {
+TEST_F(GainMapMathTest, Float32ToFloat16) {
EXPECT_EQ(floatToHalf(0.1f), 0x2E66);
EXPECT_EQ(floatToHalf(0.0f), 0x0);
EXPECT_EQ(floatToHalf(1.0f), 0x3C00);
@@ -973,7 +973,7 @@
EXPECT_EQ(floatToHalf(0x1.0p-126f), 0x0); // float zero
}
-TEST_F(RecoveryMapMathTest, GenerateMapLuminanceSrgb) {
+TEST_F(GainMapMathTest, GenerateMapLuminanceSrgb) {
EXPECT_FLOAT_EQ(SrgbYuvToLuminance(YuvBlack(), srgbLuminance),
0.0f);
EXPECT_FLOAT_EQ(SrgbYuvToLuminance(YuvWhite(), srgbLuminance),
@@ -986,7 +986,7 @@
srgbLuminance(RgbBlue()) * kSdrWhiteNits, LuminanceEpsilon());
}
-TEST_F(RecoveryMapMathTest, GenerateMapLuminanceSrgbP3) {
+TEST_F(GainMapMathTest, GenerateMapLuminanceSrgbP3) {
EXPECT_FLOAT_EQ(SrgbYuvToLuminance(YuvBlack(), p3Luminance),
0.0f);
EXPECT_FLOAT_EQ(SrgbYuvToLuminance(YuvWhite(), p3Luminance),
@@ -999,7 +999,7 @@
p3Luminance(RgbBlue()) * kSdrWhiteNits, LuminanceEpsilon());
}
-TEST_F(RecoveryMapMathTest, GenerateMapLuminanceSrgbBt2100) {
+TEST_F(GainMapMathTest, GenerateMapLuminanceSrgbBt2100) {
EXPECT_FLOAT_EQ(SrgbYuvToLuminance(YuvBlack(), bt2100Luminance),
0.0f);
EXPECT_FLOAT_EQ(SrgbYuvToLuminance(YuvWhite(), bt2100Luminance),
@@ -1012,7 +1012,7 @@
bt2100Luminance(RgbBlue()) * kSdrWhiteNits, LuminanceEpsilon());
}
-TEST_F(RecoveryMapMathTest, GenerateMapLuminanceHlg) {
+TEST_F(GainMapMathTest, GenerateMapLuminanceHlg) {
EXPECT_FLOAT_EQ(Bt2100YuvToLuminance(YuvBlack(), hlgInvOetf, identityConversion,
bt2100Luminance, kHlgMaxNits),
0.0f);
@@ -1030,7 +1030,7 @@
bt2100Luminance(RgbBlue()) * kHlgMaxNits, LuminanceEpsilon());
}
-TEST_F(RecoveryMapMathTest, GenerateMapLuminancePq) {
+TEST_F(GainMapMathTest, GenerateMapLuminancePq) {
EXPECT_FLOAT_EQ(Bt2100YuvToLuminance(YuvBlack(), pqInvOetf, identityConversion,
bt2100Luminance, kPqMaxNits),
0.0f);
@@ -1048,7 +1048,7 @@
bt2100Luminance(RgbBlue()) * kPqMaxNits, LuminanceEpsilon());
}
-TEST_F(RecoveryMapMathTest, ApplyMap) {
+TEST_F(GainMapMathTest, ApplyMap) {
jpegr_metadata_struct metadata = { .maxContentBoost = 8.0f,
.minContentBoost = 1.0f / 8.0f };
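The renamed math tests pin down the encode/apply relationship: encodeGain maps log2(hdr/sdr), clamped to [log2(minContentBoost), log2(maxContentBoost)], linearly onto 0..255, and applyGain interpolates the boost back out of that range before scaling the SDR color. The sketch below is a reconstruction from the expectations above, not the library source; the free functions taking min/max floats instead of a metadata struct are an illustrative simplification.

#include <algorithm>
#include <cmath>
#include <cstdint>

struct Rgb { float r, g, b; };

// Map log2(hdr/sdr), clamped to [log2(min), log2(max)], linearly onto [0, 255].
uint8_t encodeGain(float sdrNits, float hdrNits, float minContentBoost, float maxContentBoost) {
    const float minLog = std::log2(minContentBoost);
    const float maxLog = std::log2(maxContentBoost);
    float logGain = 0.0f;  // ratio treated as 1 when the SDR sample is zero
    if (sdrNits > 0.0f) {
        logGain = (hdrNits > 0.0f) ? std::log2(hdrNits / sdrNits) : minLog;
    }
    logGain = std::clamp(logGain, minLog, maxLog);
    return static_cast<uint8_t>((logGain - minLog) / (maxLog - minLog) * 255.0f);
}

// Invert the mapping: a normalized gain of 0 applies minContentBoost, 1 applies maxContentBoost.
Rgb applyGain(Rgb e, float gain, float minContentBoost, float maxContentBoost) {
    const float logBoost =
            std::log2(minContentBoost) * (1.0f - gain) + std::log2(maxContentBoost) * gain;
    const float boost = std::exp2(logBoost);
    return {e.r * boost, e.g * boost, e.b * boost};
}

// With maxContentBoost = 4 and minContentBoost = 1/4, encodeGain(1.0f, 2.0f, ...) yields 191,
// encodeGain(2.0f, 1.0f, ...) yields 63, and applyGain(white, 0.75f, ...) scales by 2x,
// matching the EncodeGain and ApplyGain expectations above.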
diff --git a/libs/jpegrecoverymap/tests/jpegr_test.cpp b/libs/jpegrecoverymap/tests/jpegr_test.cpp
index 229d7dc..620f431 100644
--- a/libs/jpegrecoverymap/tests/jpegr_test.cpp
+++ b/libs/jpegrecoverymap/tests/jpegr_test.cpp
@@ -16,7 +16,7 @@
#include <jpegrecoverymap/jpegr.h>
#include <jpegrecoverymap/jpegrutils.h>
-#include <jpegrecoverymap/recoverymapmath.h>
+#include <jpegrecoverymap/gainmapmath.h>
#include <fcntl.h>
#include <fstream>
#include <gtest/gtest.h>
@@ -117,18 +117,18 @@
class JpegRBenchmark : public JpegR {
public:
- void BenchmarkGenerateRecoveryMap(jr_uncompressed_ptr yuv420Image, jr_uncompressed_ptr p010Image,
- jr_metadata_ptr metadata, jr_uncompressed_ptr map);
- void BenchmarkApplyRecoveryMap(jr_uncompressed_ptr yuv420Image, jr_uncompressed_ptr map,
- jr_metadata_ptr metadata, jr_uncompressed_ptr dest);
+ void BenchmarkGenerateGainMap(jr_uncompressed_ptr yuv420Image, jr_uncompressed_ptr p010Image,
+ jr_metadata_ptr metadata, jr_uncompressed_ptr map);
+ void BenchmarkApplyGainMap(jr_uncompressed_ptr yuv420Image, jr_uncompressed_ptr map,
+ jr_metadata_ptr metadata, jr_uncompressed_ptr dest);
private:
const int kProfileCount = 10;
};
-void JpegRBenchmark::BenchmarkGenerateRecoveryMap(jr_uncompressed_ptr yuv420Image,
- jr_uncompressed_ptr p010Image,
- jr_metadata_ptr metadata,
- jr_uncompressed_ptr map) {
+void JpegRBenchmark::BenchmarkGenerateGainMap(jr_uncompressed_ptr yuv420Image,
+ jr_uncompressed_ptr p010Image,
+ jr_metadata_ptr metadata,
+ jr_uncompressed_ptr map) {
ASSERT_EQ(yuv420Image->width, p010Image->width);
ASSERT_EQ(yuv420Image->height, p010Image->height);
@@ -136,38 +136,38 @@
timerStart(&genRecMapTime);
for (auto i = 0; i < kProfileCount; i++) {
- ASSERT_EQ(OK, generateRecoveryMap(
+ ASSERT_EQ(OK, generateGainMap(
yuv420Image, p010Image, jpegr_transfer_function::JPEGR_TF_HLG, metadata, map));
if (i != kProfileCount - 1) delete[] static_cast<uint8_t *>(map->data);
}
timerStop(&genRecMapTime);
- ALOGE("Generate Recovery Map:- Res = %i x %i, time = %f ms",
+ ALOGE("Generate Gain Map:- Res = %i x %i, time = %f ms",
yuv420Image->width, yuv420Image->height,
elapsedTime(&genRecMapTime) / (kProfileCount * 1000.f));
}
-void JpegRBenchmark::BenchmarkApplyRecoveryMap(jr_uncompressed_ptr yuv420Image,
- jr_uncompressed_ptr map,
- jr_metadata_ptr metadata,
- jr_uncompressed_ptr dest) {
+void JpegRBenchmark::BenchmarkApplyGainMap(jr_uncompressed_ptr yuv420Image,
+ jr_uncompressed_ptr map,
+ jr_metadata_ptr metadata,
+ jr_uncompressed_ptr dest) {
Timer applyRecMapTime;
timerStart(&applyRecMapTime);
for (auto i = 0; i < kProfileCount; i++) {
- ASSERT_EQ(OK, applyRecoveryMap(yuv420Image, map, metadata, JPEGR_OUTPUT_HDR_HLG,
- metadata->maxContentBoost /* displayBoost */, dest));
+ ASSERT_EQ(OK, applyGainMap(yuv420Image, map, metadata, JPEGR_OUTPUT_HDR_HLG,
+ metadata->maxContentBoost /* displayBoost */, dest));
}
timerStop(&applyRecMapTime);
- ALOGE("Apply Recovery Map:- Res = %i x %i, time = %f ms",
+ ALOGE("Apply Gain Map:- Res = %i x %i, time = %f ms",
yuv420Image->width, yuv420Image->height,
elapsedTime(&applyRecMapTime) / (kProfileCount * 1000.f));
}
TEST_F(JpegRTest, build) {
- // Force all of the recovery map lib to be linked by calling all public functions.
+ // Force all of the gain map lib to be linked by calling all public functions.
JpegR jpegRCodec;
jpegRCodec.encodeJPEGR(nullptr, static_cast<jpegr_transfer_function>(0), nullptr, 0, nullptr);
jpegRCodec.encodeJPEGR(nullptr, nullptr, static_cast<jpegr_transfer_function>(0),
@@ -515,7 +515,7 @@
free(decodedJpegR.data);
}
-TEST_F(JpegRTest, ProfileRecoveryMapFuncs) {
+TEST_F(JpegRTest, ProfileGainMapFuncs) {
const size_t kWidth = TEST_IMAGE_WIDTH;
const size_t kHeight = TEST_IMAGE_HEIGHT;
@@ -545,7 +545,7 @@
.height = 0,
.colorGamut = JPEGR_COLORGAMUT_UNSPECIFIED };
- benchmark.BenchmarkGenerateRecoveryMap(&mRawYuv420Image, &mRawP010Image, &metadata, &map);
+ benchmark.BenchmarkGenerateGainMap(&mRawYuv420Image, &mRawP010Image, &metadata, &map);
const int dstSize = mRawYuv420Image.width * mRawYuv420Image.height * 4;
auto bufferDst = std::make_unique<uint8_t[]>(dstSize);
@@ -554,7 +554,7 @@
.height = 0,
.colorGamut = JPEGR_COLORGAMUT_UNSPECIFIED };
- benchmark.BenchmarkApplyRecoveryMap(&mRawYuv420Image, &map, &metadata, &dest);
+ benchmark.BenchmarkApplyGainMap(&mRawYuv420Image, &map, &metadata, &dest);
}
} // namespace android::recoverymap
diff --git a/services/inputflinger/Android.bp b/services/inputflinger/Android.bp
index b885435..e04481c 100644
--- a/services/inputflinger/Android.bp
+++ b/services/inputflinger/Android.bp
@@ -213,6 +213,7 @@
name: "checkinput",
required: [
// native targets
+ "libgui_test",
"libinput",
"libinputflinger",
"inputflinger_tests",
diff --git a/services/inputflinger/InputListener.cpp b/services/inputflinger/InputListener.cpp
index d33b298..1bc1adf 100644
--- a/services/inputflinger/InputListener.cpp
+++ b/services/inputflinger/InputListener.cpp
@@ -24,7 +24,6 @@
#include <android-base/stringprintf.h>
#include <android/log.h>
-#include <math.h>
#include <utils/Trace.h>
using android::base::StringPrintf;
@@ -47,6 +46,7 @@
void InputListenerInterface::notify(const NotifyArgs& generalArgs) {
Visitor v{
+ [&](const NotifyInputDevicesChangedArgs& args) { notifyInputDevicesChanged(args); },
[&](const NotifyConfigurationChangedArgs& args) { notifyConfigurationChanged(&args); },
[&](const NotifyKeyArgs& args) { notifyKey(&args); },
[&](const NotifyMotionArgs& args) { notifyMotion(&args); },
@@ -73,6 +73,11 @@
QueuedInputListener::QueuedInputListener(InputListenerInterface& innerListener)
: mInnerListener(innerListener) {}
+void QueuedInputListener::notifyInputDevicesChanged(const NotifyInputDevicesChangedArgs& args) {
+ traceEvent(__func__, args.id);
+ mArgsQueue.emplace_back(args);
+}
+
void QueuedInputListener::notifyConfigurationChanged(
const NotifyConfigurationChangedArgs* args) {
traceEvent(__func__, args->id);
diff --git a/services/inputflinger/InputManager.cpp b/services/inputflinger/InputManager.cpp
index 9182503..472d7a1 100644
--- a/services/inputflinger/InputManager.cpp
+++ b/services/inputflinger/InputManager.cpp
@@ -110,10 +110,6 @@
return *mReader;
}
-UnwantedInteractionBlockerInterface& InputManager::getBlocker() {
- return *mBlocker;
-}
-
InputProcessorInterface& InputManager::getProcessor() {
return *mProcessor;
}
@@ -129,6 +125,17 @@
mDispatcher->monitor();
}
+void InputManager::dump(std::string& dump) {
+ mReader->dump(dump);
+ dump += '\n';
+ mBlocker->dump(dump);
+ dump += '\n';
+ mProcessor->dump(dump);
+ dump += '\n';
+ mDispatcher->dump(dump);
+ dump += '\n';
+}
+
// Used by tests only.
binder::Status InputManager::createInputChannel(const std::string& name, InputChannel* outChannel) {
IPCThreadState* ipc = IPCThreadState::self();
diff --git a/services/inputflinger/InputManager.h b/services/inputflinger/InputManager.h
index 1137193..793757d 100644
--- a/services/inputflinger/InputManager.h
+++ b/services/inputflinger/InputManager.h
@@ -82,9 +82,6 @@
/* Gets the input reader. */
virtual InputReaderInterface& getReader() = 0;
- /* Gets the unwanted interaction blocker. */
- virtual UnwantedInteractionBlockerInterface& getBlocker() = 0;
-
/* Gets the input processor */
virtual InputProcessorInterface& getProcessor() = 0;
@@ -93,6 +90,9 @@
/* Check that the input stages have not deadlocked. */
virtual void monitor() = 0;
+
+ /* Dump the state of the components controlled by the input manager. */
+ virtual void dump(std::string& dump) = 0;
};
class InputManager : public InputManagerInterface, public BnInputFlinger {
@@ -108,10 +108,10 @@
status_t stop() override;
InputReaderInterface& getReader() override;
- UnwantedInteractionBlockerInterface& getBlocker() override;
InputProcessorInterface& getProcessor() override;
InputDispatcherInterface& getDispatcher() override;
void monitor() override;
+ void dump(std::string& dump) override;
status_t dump(int fd, const Vector<String16>& args) override;
binder::Status createInputChannel(const std::string& name, InputChannel* outChannel) override;
diff --git a/services/inputflinger/InputProcessor.cpp b/services/inputflinger/InputProcessor.cpp
index a98b383..6c0bcff 100644
--- a/services/inputflinger/InputProcessor.cpp
+++ b/services/inputflinger/InputProcessor.cpp
@@ -413,6 +413,12 @@
}
}
+void InputProcessor::notifyInputDevicesChanged(const NotifyInputDevicesChangedArgs& args) {
+ // pass through
+ mQueuedListener.notify(args);
+ mQueuedListener.flush();
+}
+
void InputProcessor::notifyConfigurationChanged(const NotifyConfigurationChangedArgs* args) {
// pass through
mQueuedListener.notifyConfigurationChanged(args);
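Like the other notify* overloads in InputProcessor, the new device-change callback has no classification work to do, so it only queues the args and flushes them to the next stage. A minimal sketch of that queued pass-through pattern, with hypothetical Args/Listener types standing in for the real interfaces:

#include <cstdint>
#include <utility>
#include <vector>

// Hypothetical stand-ins for NotifyArgs and InputListenerInterface; illustrative only.
struct Args { int32_t id; };

struct Listener {
    virtual ~Listener() = default;
    virtual void onArgs(const Args& args) = 0;
};

// Buffer notifications while the stage runs, then deliver them in order.
class QueuedForwarder {
public:
    explicit QueuedForwarder(Listener& inner) : mInner(inner) {}
    void notify(Args args) { mQueue.push_back(std::move(args)); }
    void flush() {
        for (const Args& args : mQueue) {
            mInner.onArgs(args);
        }
        mQueue.clear();
    }

private:
    Listener& mInner;
    std::vector<Args> mQueue;
};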
diff --git a/services/inputflinger/InputProcessor.h b/services/inputflinger/InputProcessor.h
index f4d02b6..01795a8 100644
--- a/services/inputflinger/InputProcessor.h
+++ b/services/inputflinger/InputProcessor.h
@@ -245,6 +245,7 @@
public:
explicit InputProcessor(InputListenerInterface& listener);
+ void notifyInputDevicesChanged(const NotifyInputDevicesChangedArgs& args) override;
void notifyConfigurationChanged(const NotifyConfigurationChangedArgs* args) override;
void notifyKey(const NotifyKeyArgs* args) override;
void notifyMotion(const NotifyMotionArgs* args) override;
diff --git a/services/inputflinger/NotifyArgs.cpp b/services/inputflinger/NotifyArgs.cpp
index 5f2a22f..408fbed 100644
--- a/services/inputflinger/NotifyArgs.cpp
+++ b/services/inputflinger/NotifyArgs.cpp
@@ -29,6 +29,12 @@
namespace android {
+// --- NotifyInputDevicesChangedArgs ---
+
+NotifyInputDevicesChangedArgs::NotifyInputDevicesChangedArgs(int32_t id,
+ std::vector<InputDeviceInfo> infos)
+ : id(id), inputDeviceInfos(std::move(infos)) {}
+
// --- NotifyConfigurationChangedArgs ---
NotifyConfigurationChangedArgs::NotifyConfigurationChangedArgs(int32_t id, nsecs_t eventTime)
@@ -234,6 +240,7 @@
const char* toString(const NotifyArgs& args) {
Visitor toStringVisitor{
+ [&](const NotifyInputDevicesChangedArgs&) { return "NotifyInputDevicesChangedArgs"; },
[&](const NotifyConfigurationChangedArgs&) { return "NotifyConfigurationChangedArgs"; },
[&](const NotifyKeyArgs&) { return "NotifyKeyArgs"; },
[&](const NotifyMotionArgs&) { return "NotifyMotionArgs"; },
diff --git a/services/inputflinger/UnwantedInteractionBlocker.cpp b/services/inputflinger/UnwantedInteractionBlocker.cpp
index ae20f86..6d43e8d 100644
--- a/services/inputflinger/UnwantedInteractionBlocker.cpp
+++ b/services/inputflinger/UnwantedInteractionBlocker.cpp
@@ -411,6 +411,13 @@
}
void UnwantedInteractionBlocker::notifyInputDevicesChanged(
+ const NotifyInputDevicesChangedArgs& args) {
+ onInputDevicesChanged(args.inputDeviceInfos);
+ mQueuedListener.notify(args);
+ mQueuedListener.flush();
+}
+
+void UnwantedInteractionBlocker::onInputDevicesChanged(
const std::vector<InputDeviceInfo>& inputDevices) {
std::scoped_lock lock(mLock);
if (!mEnablePalmRejection) {
diff --git a/services/inputflinger/UnwantedInteractionBlocker.h b/services/inputflinger/UnwantedInteractionBlocker.h
index 5d0dde8..3bc5240 100644
--- a/services/inputflinger/UnwantedInteractionBlocker.h
+++ b/services/inputflinger/UnwantedInteractionBlocker.h
@@ -90,6 +90,7 @@
explicit UnwantedInteractionBlocker(InputListenerInterface& listener);
explicit UnwantedInteractionBlocker(InputListenerInterface& listener, bool enablePalmRejection);
+ void notifyInputDevicesChanged(const NotifyInputDevicesChangedArgs& args) override;
void notifyConfigurationChanged(const NotifyConfigurationChangedArgs* args) override;
void notifyKey(const NotifyKeyArgs* args) override;
void notifyMotion(const NotifyMotionArgs* args) override;
@@ -99,7 +100,6 @@
void notifyDeviceReset(const NotifyDeviceResetArgs* args) override;
void notifyPointerCaptureChanged(const NotifyPointerCaptureChangedArgs* args) override;
- void notifyInputDevicesChanged(const std::vector<InputDeviceInfo>& inputDevices) override;
void dump(std::string& dump) override;
void monitor() override;
@@ -123,6 +123,8 @@
// Call this function for outbound events so that they can be logged when logging is enabled.
void enqueueOutboundMotionLocked(const NotifyMotionArgs& args) REQUIRES(mLock);
+
+ void onInputDevicesChanged(const std::vector<InputDeviceInfo>& inputDevices);
};
class SlotState {
diff --git a/services/inputflinger/dispatcher/InputDispatcher.cpp b/services/inputflinger/dispatcher/InputDispatcher.cpp
index 851f13c..c39c408 100644
--- a/services/inputflinger/dispatcher/InputDispatcher.cpp
+++ b/services/inputflinger/dispatcher/InputDispatcher.cpp
@@ -117,11 +117,7 @@
return systemTime(SYSTEM_TIME_MONOTONIC);
}
-inline const char* toString(bool value) {
- return value ? "true" : "false";
-}
-
-inline const std::string toString(const sp<IBinder>& binder) {
+inline const std::string binderToString(const sp<IBinder>& binder) {
if (binder == nullptr) {
return "<null>";
}
@@ -2909,7 +2905,7 @@
info->frameBottom, dumpRegion(info->touchableRegion).c_str(),
info->name.c_str(), info->inputConfig.string().c_str(),
toString(info->token != nullptr), info->applicationInfo.name.c_str(),
- toString(info->applicationInfo.token).c_str());
+ binderToString(info->applicationInfo.token).c_str());
}
bool InputDispatcher::isTouchTrustedLocked(const TouchOcclusionInfo& occlusionInfo) const {
@@ -3623,8 +3619,8 @@
const sp<Connection>& connection,
bool notify) {
if (DEBUG_DISPATCH_CYCLE) {
- ALOGD("channel '%s' ~ abortBrokenDispatchCycle - notify=%s",
- connection->getInputChannelName().c_str(), toString(notify));
+ LOG(DEBUG) << "channel '" << connection->getInputChannelName() << "'~ " << __func__
+ << " - notify=" << toString(notify);
}
// Clear the dispatch queues.
@@ -4376,10 +4372,10 @@
std::chrono::milliseconds timeout,
uint32_t policyFlags) {
if (debugInboundEventDetails()) {
- ALOGD("injectInputEvent - eventType=%d, targetUid=%s, syncMode=%d, timeout=%lld, "
- "policyFlags=0x%08x",
- event->getType(), targetUid ? std::to_string(*targetUid).c_str() : "none", syncMode,
- timeout.count(), policyFlags);
+ LOG(DEBUG) << __func__ << ": targetUid=" << toString(targetUid)
+ << ", syncMode=" << ftl::enum_string(syncMode) << ", timeout=" << timeout.count()
+ << "ms, policyFlags=0x" << std::hex << policyFlags << std::dec
+ << ", event=" << *event;
}
nsecs_t endTime = now() + std::chrono::duration_cast<std::chrono::nanoseconds>(timeout).count();
@@ -4398,7 +4394,7 @@
std::queue<std::unique_ptr<EventEntry>> injectedEntries;
switch (event->getType()) {
- case AINPUT_EVENT_TYPE_KEY: {
+ case InputEventType::KEY: {
const KeyEvent& incomingKey = static_cast<const KeyEvent&>(*event);
int32_t action = incomingKey.getAction();
if (!validateKeyEvent(action)) {
@@ -4444,7 +4440,7 @@
break;
}
- case AINPUT_EVENT_TYPE_MOTION: {
+ case InputEventType::MOTION: {
const MotionEvent& motionEvent = static_cast<const MotionEvent&>(*event);
const int32_t action = motionEvent.getAction();
const bool isPointerEvent =
@@ -4520,7 +4516,7 @@
}
default:
- ALOGW("Cannot inject %s events", inputEventTypeToString(event->getType()));
+ LOG(WARNING) << "Cannot inject " << ftl::enum_string(event->getType()) << " events";
return InputEventInjectionResult::FAILED;
}
@@ -4610,14 +4606,14 @@
std::array<uint8_t, 32> calculatedHmac;
std::unique_ptr<VerifiedInputEvent> result;
switch (event.getType()) {
- case AINPUT_EVENT_TYPE_KEY: {
+ case InputEventType::KEY: {
const KeyEvent& keyEvent = static_cast<const KeyEvent&>(event);
VerifiedKeyEvent verifiedKeyEvent = verifiedKeyEventFromKeyEvent(keyEvent);
result = std::make_unique<VerifiedKeyEvent>(verifiedKeyEvent);
calculatedHmac = sign(verifiedKeyEvent);
break;
}
- case AINPUT_EVENT_TYPE_MOTION: {
+ case InputEventType::MOTION: {
const MotionEvent& motionEvent = static_cast<const MotionEvent&>(event);
VerifiedMotionEvent verifiedMotionEvent =
verifiedMotionEventFromMotionEvent(motionEvent);
@@ -5519,14 +5515,14 @@
windowInfo->frameTop, windowInfo->frameRight,
windowInfo->frameBottom, windowInfo->globalScaleFactor,
windowInfo->applicationInfo.name.c_str(),
- toString(windowInfo->applicationInfo.token).c_str());
+ binderToString(windowInfo->applicationInfo.token).c_str());
dump += dumpRegion(windowInfo->touchableRegion);
dump += StringPrintf(", ownerPid=%d, ownerUid=%d, dispatchingTimeout=%" PRId64
"ms, hasToken=%s, "
"touchOcclusionMode=%s\n",
windowInfo->ownerPid, windowInfo->ownerUid,
millis(windowInfo->dispatchingTimeout),
- toString(windowInfo->token != nullptr),
+ binderToString(windowInfo->token).c_str(),
toString(windowInfo->touchOcclusionMode).c_str());
windowInfo->transform.dump(dump, "transform", INDENT4);
}
diff --git a/services/inputflinger/dispatcher/InputDispatcher.h b/services/inputflinger/dispatcher/InputDispatcher.h
index 2246d47..aaf1214 100644
--- a/services/inputflinger/dispatcher/InputDispatcher.h
+++ b/services/inputflinger/dispatcher/InputDispatcher.h
@@ -93,6 +93,7 @@
status_t start() override;
status_t stop() override;
+ void notifyInputDevicesChanged(const NotifyInputDevicesChangedArgs& args) override{};
void notifyConfigurationChanged(const NotifyConfigurationChangedArgs* args) override;
void notifyKey(const NotifyKeyArgs* args) override;
void notifyMotion(const NotifyMotionArgs* args) override;
diff --git a/services/inputflinger/include/InputListener.h b/services/inputflinger/include/InputListener.h
index 1bb1968..d1b86c8 100644
--- a/services/inputflinger/include/InputListener.h
+++ b/services/inputflinger/include/InputListener.h
@@ -37,6 +37,7 @@
InputListenerInterface& operator=(const InputListenerInterface&) = delete;
virtual ~InputListenerInterface() { }
+ virtual void notifyInputDevicesChanged(const NotifyInputDevicesChangedArgs& args) = 0;
virtual void notifyConfigurationChanged(const NotifyConfigurationChangedArgs* args) = 0;
virtual void notifyKey(const NotifyKeyArgs* args) = 0;
virtual void notifyMotion(const NotifyMotionArgs* args) = 0;
@@ -58,6 +59,7 @@
public:
explicit QueuedInputListener(InputListenerInterface& innerListener);
+ virtual void notifyInputDevicesChanged(const NotifyInputDevicesChangedArgs& args) override;
virtual void notifyConfigurationChanged(const NotifyConfigurationChangedArgs* args) override;
virtual void notifyKey(const NotifyKeyArgs* args) override;
virtual void notifyMotion(const NotifyMotionArgs* args) override;
diff --git a/services/inputflinger/include/NotifyArgs.h b/services/inputflinger/include/NotifyArgs.h
index c46f905..f12482b 100644
--- a/services/inputflinger/include/NotifyArgs.h
+++ b/services/inputflinger/include/NotifyArgs.h
@@ -24,6 +24,20 @@
namespace android {
+/* Describes a change in any of the connected input devices. */
+struct NotifyInputDevicesChangedArgs {
+ int32_t id;
+ std::vector<InputDeviceInfo> inputDeviceInfos;
+
+ inline NotifyInputDevicesChangedArgs() {}
+
+ NotifyInputDevicesChangedArgs(int32_t id, std::vector<InputDeviceInfo> infos);
+
+ bool operator==(const NotifyInputDevicesChangedArgs& rhs) const = default;
+
+ NotifyInputDevicesChangedArgs(const NotifyInputDevicesChangedArgs& other) = default;
+};
+
/* Describes a configuration change event. */
struct NotifyConfigurationChangedArgs {
int32_t id;
@@ -183,7 +197,6 @@
/* Describes a change in the state of Pointer Capture. */
struct NotifyPointerCaptureChangedArgs {
- // The sequence number of the Pointer Capture request, if enabled.
int32_t id;
nsecs_t eventTime;
@@ -211,9 +224,10 @@
NotifyVibratorStateArgs(const NotifyVibratorStateArgs& other) = default;
};
-using NotifyArgs = std::variant<NotifyConfigurationChangedArgs, NotifyKeyArgs, NotifyMotionArgs,
- NotifySensorArgs, NotifySwitchArgs, NotifyDeviceResetArgs,
- NotifyPointerCaptureChangedArgs, NotifyVibratorStateArgs>;
+using NotifyArgs =
+ std::variant<NotifyInputDevicesChangedArgs, NotifyConfigurationChangedArgs, NotifyKeyArgs,
+ NotifyMotionArgs, NotifySensorArgs, NotifySwitchArgs, NotifyDeviceResetArgs,
+ NotifyPointerCaptureChangedArgs, NotifyVibratorStateArgs>;
const char* toString(const NotifyArgs& args);
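As context for the new NotifyInputDevicesChangedArgs alternative above: a minimal sketch of how a queued NotifyArgs variant can be routed to the matching InputListenerInterface callback. The free function name deliverToListener is hypothetical and not part of this change; it only illustrates the variant/listener relationship defined in NotifyArgs.h and InputListener.h.

#include <variant>

#include "InputListener.h"
#include "NotifyArgs.h"

namespace android {

// Hypothetical helper, for illustration only: forwards one queued NotifyArgs
// alternative to the corresponding listener callback.
void deliverToListener(InputListenerInterface& listener, const NotifyArgs& args) {
    if (const auto* devices = std::get_if<NotifyInputDevicesChangedArgs>(&args)) {
        listener.notifyInputDevicesChanged(*devices);
    } else if (const auto* key = std::get_if<NotifyKeyArgs>(&args)) {
        listener.notifyKey(key);
    } else if (const auto* motion = std::get_if<NotifyMotionArgs>(&args)) {
        listener.notifyMotion(motion);
    }
    // The remaining alternatives (sensor, switch, device reset, pointer capture,
    // vibrator state) would be forwarded the same way.
}

} // namespace android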
diff --git a/services/inputflinger/include/UnwantedInteractionBlockerInterface.h b/services/inputflinger/include/UnwantedInteractionBlockerInterface.h
index 1a6f847..64c6114 100644
--- a/services/inputflinger/include/UnwantedInteractionBlockerInterface.h
+++ b/services/inputflinger/include/UnwantedInteractionBlockerInterface.h
@@ -27,23 +27,13 @@
*/
class UnwantedInteractionBlockerInterface : public InputListenerInterface {
public:
- /* Notifies the input reader policy that some input devices have changed
- * and provides information about all current input devices.
- * Important! This call should happen on the same thread as the calls to the
- * InputListenerInterface methods.
- * That is, same thread should call 'notifyMotion' and 'notifyInputDevicesChanged' and
- * 'notifyDeviceReset'. If this architecture changes, we will need to make the implementation
- * of this interface thread-safe.
- */
- virtual void notifyInputDevicesChanged(const std::vector<InputDeviceInfo>& inputDevices) = 0;
-
/**
* Dump the state of the interaction blocker.
* This method may be called on any thread (usually by the input manager on a binder thread).
*/
virtual void dump(std::string& dump) = 0;
- /* Called by the heatbeat to ensures that the blocker has not deadlocked. */
+ /* Called by the heartbeat to ensure that the blocker has not deadlocked. */
virtual void monitor() = 0;
UnwantedInteractionBlockerInterface() {}
diff --git a/services/inputflinger/reader/InputReader.cpp b/services/inputflinger/reader/InputReader.cpp
index 6b8bc51..6f54faa 100644
--- a/services/inputflinger/reader/InputReader.cpp
+++ b/services/inputflinger/reader/InputReader.cpp
@@ -157,6 +157,8 @@
if (oldGeneration != mGeneration) {
inputDevicesChanged = true;
inputDevices = getInputDevicesLocked();
+ notifyArgs.emplace_back(
+ NotifyInputDevicesChangedArgs{mContext.getNextId(), inputDevices});
}
} // release lock
diff --git a/services/inputflinger/reader/mapper/TouchpadInputMapper.cpp b/services/inputflinger/reader/mapper/TouchpadInputMapper.cpp
index ac1ba14..5a1ced4 100644
--- a/services/inputflinger/reader/mapper/TouchpadInputMapper.cpp
+++ b/services/inputflinger/reader/mapper/TouchpadInputMapper.cpp
@@ -57,7 +57,7 @@
{std::numeric_limits<double>::infinity(), 15.04, -857.758},
};
-const std::vector<double> sensitivityFactors = {1, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12, 14, 16, 18, 20};
+const std::vector<double> sensitivityFactors = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 14, 16, 18};
std::vector<double> createAccelerationCurveForSensitivity(int32_t sensitivity,
size_t propertySize) {
diff --git a/services/inputflinger/tests/InputDispatcher_test.cpp b/services/inputflinger/tests/InputDispatcher_test.cpp
index fb808eb..5e51bfc 100644
--- a/services/inputflinger/tests/InputDispatcher_test.cpp
+++ b/services/inputflinger/tests/InputDispatcher_test.cpp
@@ -213,7 +213,7 @@
void assertFilterInputEventWasCalled(const NotifyKeyArgs& args) {
assertFilterInputEventWasCalledInternal([&args](const InputEvent& event) {
- ASSERT_EQ(event.getType(), AINPUT_EVENT_TYPE_KEY);
+ ASSERT_EQ(event.getType(), InputEventType::KEY);
EXPECT_EQ(event.getDisplayId(), args.displayId);
const auto& keyEvent = static_cast<const KeyEvent&>(event);
@@ -224,7 +224,7 @@
void assertFilterInputEventWasCalled(const NotifyMotionArgs& args, vec2 point) {
assertFilterInputEventWasCalledInternal([&](const InputEvent& event) {
- ASSERT_EQ(event.getType(), AINPUT_EVENT_TYPE_MOTION);
+ ASSERT_EQ(event.getType(), InputEventType::MOTION);
EXPECT_EQ(event.getDisplayId(), args.displayId);
const auto& motionEvent = static_cast<const MotionEvent&>(event);
@@ -530,17 +530,21 @@
bool filterInputEvent(const InputEvent* inputEvent, uint32_t policyFlags) override {
std::scoped_lock lock(mLock);
switch (inputEvent->getType()) {
- case AINPUT_EVENT_TYPE_KEY: {
+ case InputEventType::KEY: {
const KeyEvent* keyEvent = static_cast<const KeyEvent*>(inputEvent);
mFilteredEvent = std::make_unique<KeyEvent>(*keyEvent);
break;
}
- case AINPUT_EVENT_TYPE_MOTION: {
+ case InputEventType::MOTION: {
const MotionEvent* motionEvent = static_cast<const MotionEvent*>(inputEvent);
mFilteredEvent = std::make_unique<MotionEvent>(*motionEvent);
break;
}
+ default: {
+ ADD_FAILURE() << "Should only filter keys or motions";
+ break;
+ }
}
return true;
}
@@ -924,7 +928,7 @@
ASSERT_EQ(OK, status);
}
- void consumeEvent(int32_t expectedEventType, int32_t expectedAction,
+ void consumeEvent(InputEventType expectedEventType, int32_t expectedAction,
std::optional<int32_t> expectedDisplayId,
std::optional<int32_t> expectedFlags) {
InputEvent* event = consume();
@@ -932,15 +936,15 @@
ASSERT_NE(nullptr, event) << mName.c_str()
<< ": consumer should have returned non-NULL event.";
ASSERT_EQ(expectedEventType, event->getType())
- << mName.c_str() << " expected " << inputEventTypeToString(expectedEventType)
- << " event, got " << inputEventTypeToString(event->getType()) << " event";
+ << mName.c_str() << " expected " << ftl::enum_string(expectedEventType)
+ << " event, got " << *event;
if (expectedDisplayId.has_value()) {
EXPECT_EQ(expectedDisplayId, event->getDisplayId());
}
switch (expectedEventType) {
- case AINPUT_EVENT_TYPE_KEY: {
+ case InputEventType::KEY: {
const KeyEvent& keyEvent = static_cast<const KeyEvent&>(*event);
EXPECT_EQ(expectedAction, keyEvent.getAction());
if (expectedFlags.has_value()) {
@@ -948,7 +952,7 @@
}
break;
}
- case AINPUT_EVENT_TYPE_MOTION: {
+ case InputEventType::MOTION: {
const MotionEvent& motionEvent = static_cast<const MotionEvent&>(*event);
assertMotionAction(expectedAction, motionEvent.getAction());
@@ -957,21 +961,18 @@
}
break;
}
- case AINPUT_EVENT_TYPE_FOCUS: {
+ case InputEventType::FOCUS: {
FAIL() << "Use 'consumeFocusEvent' for FOCUS events";
}
- case AINPUT_EVENT_TYPE_CAPTURE: {
+ case InputEventType::CAPTURE: {
FAIL() << "Use 'consumeCaptureEvent' for CAPTURE events";
}
- case AINPUT_EVENT_TYPE_TOUCH_MODE: {
+ case InputEventType::TOUCH_MODE: {
FAIL() << "Use 'consumeTouchModeEvent' for TOUCH_MODE events";
}
- case AINPUT_EVENT_TYPE_DRAG: {
+ case InputEventType::DRAG: {
FAIL() << "Use 'consumeDragEvent' for DRAG events";
}
- default: {
- FAIL() << mName.c_str() << ": invalid event type: " << expectedEventType;
- }
}
}
@@ -983,9 +984,8 @@
return nullptr;
}
- if (event->getType() != AINPUT_EVENT_TYPE_MOTION) {
- ADD_FAILURE() << mName << " expected a MotionEvent, got "
- << inputEventTypeToString(event->getType()) << " event";
+ if (event->getType() != InputEventType::MOTION) {
+ ADD_FAILURE() << mName << " expected a MotionEvent, got " << *event;
return nullptr;
}
return static_cast<MotionEvent*>(event);
@@ -1001,9 +1001,8 @@
InputEvent* event = consume();
ASSERT_NE(nullptr, event) << mName.c_str()
<< ": consumer should have returned non-NULL event.";
- ASSERT_EQ(AINPUT_EVENT_TYPE_FOCUS, event->getType())
- << "Got " << inputEventTypeToString(event->getType())
- << " event instead of FOCUS event";
+ ASSERT_EQ(InputEventType::FOCUS, event->getType())
+ << "Instead of FocusEvent, got " << *event;
ASSERT_EQ(ADISPLAY_ID_NONE, event->getDisplayId())
<< mName.c_str() << ": event displayId should always be NONE.";
@@ -1016,9 +1015,8 @@
const InputEvent* event = consume();
ASSERT_NE(nullptr, event) << mName.c_str()
<< ": consumer should have returned non-NULL event.";
- ASSERT_EQ(AINPUT_EVENT_TYPE_CAPTURE, event->getType())
- << "Got " << inputEventTypeToString(event->getType())
- << " event instead of CAPTURE event";
+ ASSERT_EQ(InputEventType::CAPTURE, event->getType())
+ << "Instead of CaptureEvent, got " << *event;
ASSERT_EQ(ADISPLAY_ID_NONE, event->getDisplayId())
<< mName.c_str() << ": event displayId should always be NONE.";
@@ -1031,9 +1029,7 @@
const InputEvent* event = consume();
ASSERT_NE(nullptr, event) << mName.c_str()
<< ": consumer should have returned non-NULL event.";
- ASSERT_EQ(AINPUT_EVENT_TYPE_DRAG, event->getType())
- << "Got " << inputEventTypeToString(event->getType())
- << " event instead of DRAG event";
+ ASSERT_EQ(InputEventType::DRAG, event->getType()) << "Instead of DragEvent, got " << *event;
EXPECT_EQ(ADISPLAY_ID_NONE, event->getDisplayId())
<< mName.c_str() << ": event displayId should always be NONE.";
@@ -1048,9 +1044,8 @@
const InputEvent* event = consume();
ASSERT_NE(nullptr, event) << mName.c_str()
<< ": consumer should have returned non-NULL event.";
- ASSERT_EQ(AINPUT_EVENT_TYPE_TOUCH_MODE, event->getType())
- << "Got " << inputEventTypeToString(event->getType())
- << " event instead of TOUCH_MODE event";
+ ASSERT_EQ(InputEventType::TOUCH_MODE, event->getType())
+ << "Instead of TouchModeEvent, got " << *event;
ASSERT_EQ(ADISPLAY_ID_NONE, event->getDisplayId())
<< mName.c_str() << ": event displayId should always be NONE.";
@@ -1063,23 +1058,23 @@
if (event == nullptr) {
return;
}
- if (event->getType() == AINPUT_EVENT_TYPE_KEY) {
+ if (event->getType() == InputEventType::KEY) {
KeyEvent& keyEvent = static_cast<KeyEvent&>(*event);
ADD_FAILURE() << "Received key event "
<< KeyEvent::actionToString(keyEvent.getAction());
- } else if (event->getType() == AINPUT_EVENT_TYPE_MOTION) {
+ } else if (event->getType() == InputEventType::MOTION) {
MotionEvent& motionEvent = static_cast<MotionEvent&>(*event);
ADD_FAILURE() << "Received motion event "
<< MotionEvent::actionToString(motionEvent.getAction());
- } else if (event->getType() == AINPUT_EVENT_TYPE_FOCUS) {
+ } else if (event->getType() == InputEventType::FOCUS) {
FocusEvent& focusEvent = static_cast<FocusEvent&>(*event);
ADD_FAILURE() << "Received focus event, hasFocus = "
<< (focusEvent.getHasFocus() ? "true" : "false");
- } else if (event->getType() == AINPUT_EVENT_TYPE_CAPTURE) {
+ } else if (event->getType() == InputEventType::CAPTURE) {
const auto& captureEvent = static_cast<CaptureEvent&>(*event);
ADD_FAILURE() << "Received capture event, pointerCaptureEnabled = "
<< (captureEvent.getPointerCaptureEnabled() ? "true" : "false");
- } else if (event->getType() == AINPUT_EVENT_TYPE_TOUCH_MODE) {
+ } else if (event->getType() == InputEventType::TOUCH_MODE) {
const auto& touchModeEvent = static_cast<TouchModeEvent&>(*event);
ADD_FAILURE() << "Received touch mode event, inTouchMode = "
<< (touchModeEvent.isInTouchMode() ? "true" : "false");
@@ -1239,12 +1234,11 @@
void setWindowOffset(float offsetX, float offsetY) { mInfo.transform.set(offsetX, offsetY); }
void consumeKeyDown(int32_t expectedDisplayId, int32_t expectedFlags = 0) {
- consumeEvent(AINPUT_EVENT_TYPE_KEY, AKEY_EVENT_ACTION_DOWN, expectedDisplayId,
- expectedFlags);
+ consumeEvent(InputEventType::KEY, AKEY_EVENT_ACTION_DOWN, expectedDisplayId, expectedFlags);
}
void consumeKeyUp(int32_t expectedDisplayId, int32_t expectedFlags = 0) {
- consumeEvent(AINPUT_EVENT_TYPE_KEY, AKEY_EVENT_ACTION_UP, expectedDisplayId, expectedFlags);
+ consumeEvent(InputEventType::KEY, AKEY_EVENT_ACTION_UP, expectedDisplayId, expectedFlags);
}
void consumeMotionCancel(int32_t expectedDisplayId = ADISPLAY_ID_DEFAULT,
@@ -1266,7 +1260,7 @@
void consumeAnyMotionDown(std::optional<int32_t> expectedDisplayId = std::nullopt,
std::optional<int32_t> expectedFlags = std::nullopt) {
- consumeEvent(AINPUT_EVENT_TYPE_MOTION, AMOTION_EVENT_ACTION_DOWN, expectedDisplayId,
+ consumeEvent(InputEventType::MOTION, AMOTION_EVENT_ACTION_DOWN, expectedDisplayId,
expectedFlags);
}
@@ -1275,25 +1269,25 @@
int32_t expectedFlags = 0) {
int32_t action = AMOTION_EVENT_ACTION_POINTER_DOWN |
(pointerIdx << AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT);
- consumeEvent(AINPUT_EVENT_TYPE_MOTION, action, expectedDisplayId, expectedFlags);
+ consumeEvent(InputEventType::MOTION, action, expectedDisplayId, expectedFlags);
}
void consumeMotionPointerUp(int32_t pointerIdx, int32_t expectedDisplayId = ADISPLAY_ID_DEFAULT,
int32_t expectedFlags = 0) {
int32_t action = AMOTION_EVENT_ACTION_POINTER_UP |
(pointerIdx << AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT);
- consumeEvent(AINPUT_EVENT_TYPE_MOTION, action, expectedDisplayId, expectedFlags);
+ consumeEvent(InputEventType::MOTION, action, expectedDisplayId, expectedFlags);
}
void consumeMotionUp(int32_t expectedDisplayId = ADISPLAY_ID_DEFAULT,
int32_t expectedFlags = 0) {
- consumeEvent(AINPUT_EVENT_TYPE_MOTION, AMOTION_EVENT_ACTION_UP, expectedDisplayId,
+ consumeEvent(InputEventType::MOTION, AMOTION_EVENT_ACTION_UP, expectedDisplayId,
expectedFlags);
}
void consumeMotionOutside(int32_t expectedDisplayId = ADISPLAY_ID_DEFAULT,
int32_t expectedFlags = 0) {
- consumeEvent(AINPUT_EVENT_TYPE_MOTION, AMOTION_EVENT_ACTION_OUTSIDE, expectedDisplayId,
+ consumeEvent(InputEventType::MOTION, AMOTION_EVENT_ACTION_OUTSIDE, expectedDisplayId,
expectedFlags);
}
@@ -1301,7 +1295,7 @@
int32_t expectedFlags = 0) {
InputEvent* event = consume();
ASSERT_NE(nullptr, event);
- ASSERT_EQ(AINPUT_EVENT_TYPE_MOTION, event->getType());
+ ASSERT_EQ(InputEventType::MOTION, event->getType());
const MotionEvent& motionEvent = static_cast<MotionEvent&>(*event);
EXPECT_EQ(AMOTION_EVENT_ACTION_OUTSIDE, motionEvent.getActionMasked());
EXPECT_EQ(0.f, motionEvent.getRawPointerCoords(0)->getX());
@@ -1326,7 +1320,7 @@
ASSERT_THAT(*motionEvent, matcher);
}
- void consumeEvent(int32_t expectedEventType, int32_t expectedAction,
+ void consumeEvent(InputEventType expectedEventType, int32_t expectedAction,
std::optional<int32_t> expectedDisplayId,
std::optional<int32_t> expectedFlags) {
ASSERT_NE(mInputReceiver, nullptr) << "Invalid consume event on window with no receiver";
@@ -1375,9 +1369,8 @@
ADD_FAILURE() << "Consume failed : no event";
return nullptr;
}
- if (event->getType() != AINPUT_EVENT_TYPE_MOTION) {
- ADD_FAILURE() << "Instead of motion event, got "
- << inputEventTypeToString(event->getType());
+ if (event->getType() != InputEventType::MOTION) {
+ ADD_FAILURE() << "Instead of motion event, got " << *event;
return nullptr;
}
return static_cast<MotionEvent*>(event);
@@ -3678,7 +3671,7 @@
// on the app side.
NotifyDeviceResetArgs args(/*id=*/10, /*eventTime=*/20, DEVICE_ID);
mDispatcher->notifyDeviceReset(&args);
- window->consumeEvent(AINPUT_EVENT_TYPE_KEY, AKEY_EVENT_ACTION_UP, ADISPLAY_ID_DEFAULT,
+ window->consumeEvent(InputEventType::KEY, AKEY_EVENT_ACTION_UP, ADISPLAY_ID_DEFAULT,
AKEY_EVENT_FLAG_CANCELED);
}
@@ -4752,8 +4745,8 @@
sp<IBinder> getToken() { return mInputReceiver->getToken(); }
void consumeKeyDown(int32_t expectedDisplayId, int32_t expectedFlags = 0) {
- mInputReceiver->consumeEvent(AINPUT_EVENT_TYPE_KEY, AKEY_EVENT_ACTION_DOWN,
- expectedDisplayId, expectedFlags);
+ mInputReceiver->consumeEvent(InputEventType::KEY, AKEY_EVENT_ACTION_DOWN, expectedDisplayId,
+ expectedFlags);
}
std::optional<int32_t> receiveEvent() { return mInputReceiver->receiveEvent(); }
@@ -4761,17 +4754,17 @@
void finishEvent(uint32_t consumeSeq) { return mInputReceiver->finishEvent(consumeSeq); }
void consumeMotionDown(int32_t expectedDisplayId, int32_t expectedFlags = 0) {
- mInputReceiver->consumeEvent(AINPUT_EVENT_TYPE_MOTION, AMOTION_EVENT_ACTION_DOWN,
+ mInputReceiver->consumeEvent(InputEventType::MOTION, AMOTION_EVENT_ACTION_DOWN,
expectedDisplayId, expectedFlags);
}
void consumeMotionMove(int32_t expectedDisplayId, int32_t expectedFlags = 0) {
- mInputReceiver->consumeEvent(AINPUT_EVENT_TYPE_MOTION, AMOTION_EVENT_ACTION_MOVE,
+ mInputReceiver->consumeEvent(InputEventType::MOTION, AMOTION_EVENT_ACTION_MOVE,
expectedDisplayId, expectedFlags);
}
void consumeMotionUp(int32_t expectedDisplayId, int32_t expectedFlags = 0) {
- mInputReceiver->consumeEvent(AINPUT_EVENT_TYPE_MOTION, AMOTION_EVENT_ACTION_UP,
+ mInputReceiver->consumeEvent(InputEventType::MOTION, AMOTION_EVENT_ACTION_UP,
expectedDisplayId, expectedFlags);
}
@@ -4785,7 +4778,7 @@
void consumeMotionPointerDown(int32_t pointerIdx) {
int32_t action = AMOTION_EVENT_ACTION_POINTER_DOWN |
(pointerIdx << AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT);
- mInputReceiver->consumeEvent(AINPUT_EVENT_TYPE_MOTION, action, ADISPLAY_ID_DEFAULT,
+ mInputReceiver->consumeEvent(InputEventType::MOTION, action, ADISPLAY_ID_DEFAULT,
/*expectedFlags=*/0);
}
@@ -4795,8 +4788,8 @@
ADD_FAILURE() << "No event was produced";
return nullptr;
}
- if (event->getType() != AINPUT_EVENT_TYPE_MOTION) {
- ADD_FAILURE() << "Received event of type " << event->getType() << " instead of motion";
+ if (event->getType() != InputEventType::MOTION) {
+ ADD_FAILURE() << "Expected MotionEvent, got " << *event;
return nullptr;
}
return static_cast<MotionEvent*>(event);
@@ -4952,7 +4945,7 @@
motionArgs.pointerCoords[0].getX() - 10);
mDispatcher->notifyMotion(&motionArgs);
- window->consumeEvent(AINPUT_EVENT_TYPE_MOTION, AMOTION_EVENT_ACTION_MOVE, ADISPLAY_ID_DEFAULT,
+ window->consumeEvent(InputEventType::MOTION, AMOTION_EVENT_ACTION_MOVE, ADISPLAY_ID_DEFAULT,
/*expectedFlags=*/0);
}
@@ -5423,8 +5416,7 @@
InputEvent* repeatEvent = mWindow->consume();
ASSERT_NE(nullptr, repeatEvent);
- uint32_t eventType = repeatEvent->getType();
- ASSERT_EQ(AINPUT_EVENT_TYPE_KEY, eventType);
+ ASSERT_EQ(InputEventType::KEY, repeatEvent->getType());
KeyEvent* repeatKeyEvent = static_cast<KeyEvent*>(repeatEvent);
uint32_t eventAction = repeatKeyEvent->getAction();
@@ -5439,7 +5431,7 @@
mDispatcher->notifyKey(&keyArgs);
// Window should receive the key up event.
- mWindow->consumeEvent(AINPUT_EVENT_TYPE_KEY, AKEY_EVENT_ACTION_UP, ADISPLAY_ID_DEFAULT,
+ mWindow->consumeEvent(InputEventType::KEY, AKEY_EVENT_ACTION_UP, ADISPLAY_ID_DEFAULT,
/*expectedFlags=*/0);
}
};
@@ -5612,7 +5604,7 @@
mDispatcher->setInputWindows({{SECOND_DISPLAY_ID, {}}});
// Old focus should receive a cancel event.
- windowInSecondary->consumeEvent(AINPUT_EVENT_TYPE_KEY, AKEY_EVENT_ACTION_UP, ADISPLAY_ID_NONE,
+ windowInSecondary->consumeEvent(InputEventType::KEY, AKEY_EVENT_ACTION_UP, ADISPLAY_ID_NONE,
AKEY_EVENT_FLAG_CANCELED);
// Inject a key down; it should time out because there is no target window.
@@ -5883,7 +5875,7 @@
InputEvent* received = mWindow->consume();
ASSERT_NE(nullptr, received);
ASSERT_EQ(resolvedDeviceId, received->getDeviceId());
- ASSERT_EQ(received->getType(), AINPUT_EVENT_TYPE_KEY);
+ ASSERT_EQ(received->getType(), InputEventType::KEY);
KeyEvent& keyEvent = static_cast<KeyEvent&>(*received);
ASSERT_EQ(flags, keyEvent.getFlags());
}
@@ -5918,7 +5910,7 @@
InputEvent* received = mWindow->consume();
ASSERT_NE(nullptr, received);
ASSERT_EQ(resolvedDeviceId, received->getDeviceId());
- ASSERT_EQ(received->getType(), AINPUT_EVENT_TYPE_MOTION);
+ ASSERT_EQ(received->getType(), InputEventType::MOTION);
MotionEvent& motionEvent = static_cast<MotionEvent&>(*received);
ASSERT_EQ(flags, motionEvent.getFlags());
}
@@ -6099,9 +6091,8 @@
ASSERT_NE(nullptr, event) << name.c_str()
<< ": consumer should have returned non-NULL event.";
- ASSERT_EQ(AINPUT_EVENT_TYPE_MOTION, event->getType())
- << name.c_str() << "expected " << inputEventTypeToString(AINPUT_EVENT_TYPE_MOTION)
- << " event, got " << inputEventTypeToString(event->getType()) << " event";
+ ASSERT_EQ(InputEventType::MOTION, event->getType())
+ << name.c_str() << ": expected MotionEvent, got " << *event;
const MotionEvent& motionEvent = static_cast<const MotionEvent&>(*event);
assertMotionAction(expectedAction, motionEvent.getAction());
@@ -6798,7 +6789,7 @@
FOCUSED_WINDOW_LOCATION))
<< "Inject motion event should return InputEventInjectionResult::SUCCEEDED";
mFocusedWindow->consumeMotionDown();
- mUnfocusedWindow->consumeEvent(AINPUT_EVENT_TYPE_MOTION, AMOTION_EVENT_ACTION_OUTSIDE,
+ mUnfocusedWindow->consumeEvent(InputEventType::MOTION, AMOTION_EVENT_ACTION_OUTSIDE,
ADISPLAY_ID_DEFAULT, /*flags=*/0);
// We consumed all events, so no ANR
ASSERT_TRUE(mDispatcher->waitForIdle());
@@ -6872,7 +6863,7 @@
// At the same time, FLAG_WATCH_OUTSIDE_TOUCH targets should not receive any events.
TEST_F(InputDispatcherMultiWindowAnr, DuringAnr_SecondTapIsIgnored) {
tapOnFocusedWindow();
- mUnfocusedWindow->consumeEvent(AINPUT_EVENT_TYPE_MOTION, AMOTION_EVENT_ACTION_OUTSIDE,
+ mUnfocusedWindow->consumeEvent(InputEventType::MOTION, AMOTION_EVENT_ACTION_OUTSIDE,
ADISPLAY_ID_DEFAULT, /*flags=*/0);
// Receive the events, but don't respond
std::optional<uint32_t> downEventSequenceNum = mFocusedWindow->receiveEvent(); // ACTION_DOWN
@@ -7001,7 +6992,7 @@
generateMotionArgs(AMOTION_EVENT_ACTION_DOWN, AINPUT_SOURCE_TOUCHSCREEN,
ADISPLAY_ID_DEFAULT, {FOCUSED_WINDOW_LOCATION});
mDispatcher->notifyMotion(&motionArgs);
- mUnfocusedWindow->consumeEvent(AINPUT_EVENT_TYPE_MOTION, AMOTION_EVENT_ACTION_OUTSIDE,
+ mUnfocusedWindow->consumeEvent(InputEventType::MOTION, AMOTION_EVENT_ACTION_OUTSIDE,
ADISPLAY_ID_DEFAULT, /*flags=*/0);
// Touch Window 2
@@ -7022,7 +7013,7 @@
ASSERT_TRUE(moveOrCancelSequenceNum);
mFocusedWindow->finishEvent(*moveOrCancelSequenceNum);
ASSERT_NE(nullptr, event);
- ASSERT_EQ(event->getType(), AINPUT_EVENT_TYPE_MOTION);
+ ASSERT_EQ(event->getType(), InputEventType::MOTION);
MotionEvent& motionEvent = static_cast<MotionEvent&>(*event);
if (motionEvent.getAction() == AMOTION_EVENT_ACTION_MOVE) {
mFocusedWindow->consumeMotionCancel();
@@ -8234,7 +8225,7 @@
.displayId(SECOND_DISPLAY_ID)
.pointer(PointerBuilder(0, ToolType::FINGER).x(100).y(100))
.build()));
- windowInSecondary->consumeEvent(AINPUT_EVENT_TYPE_MOTION, AMOTION_EVENT_ACTION_DOWN,
+ windowInSecondary->consumeEvent(InputEventType::MOTION, AMOTION_EVENT_ACTION_DOWN,
SECOND_DISPLAY_ID, /*expectedFlags=*/0);
// Update window again.
mDispatcher->setInputWindows({{SECOND_DISPLAY_ID, {windowInSecondary}}});
diff --git a/services/inputflinger/tests/InputReader_test.cpp b/services/inputflinger/tests/InputReader_test.cpp
index 014cc78..da3fe5b 100644
--- a/services/inputflinger/tests/InputReader_test.cpp
+++ b/services/inputflinger/tests/InputReader_test.cpp
@@ -604,6 +604,7 @@
mReader->loopOnce();
mReader->loopOnce();
ASSERT_NO_FATAL_FAILURE(mFakePolicy->assertInputDevicesChanged());
+ ASSERT_NO_FATAL_FAILURE(mFakeListener->assertNotifyInputDevicesChangedWasCalled());
ASSERT_NO_FATAL_FAILURE(mFakeEventHub->assertQueueIsEmpty());
}
@@ -1344,6 +1345,7 @@
// to the test device will show up in mReader. We wait for those input devices to
// show up before beginning the tests.
ASSERT_NO_FATAL_FAILURE(mFakePolicy->assertInputDevicesChanged());
+ ASSERT_NO_FATAL_FAILURE(mTestListener->assertNotifyInputDevicesChangedWasCalled());
ASSERT_NO_FATAL_FAILURE(mTestListener->assertNotifyConfigurationChangedWasCalled());
}
diff --git a/services/inputflinger/tests/TestInputListener.cpp b/services/inputflinger/tests/TestInputListener.cpp
index 2801072..ac1dc05 100644
--- a/services/inputflinger/tests/TestInputListener.cpp
+++ b/services/inputflinger/tests/TestInputListener.cpp
@@ -29,6 +29,14 @@
TestInputListener::~TestInputListener() {}
+void TestInputListener::assertNotifyInputDevicesChangedWasCalled(
+ NotifyInputDevicesChangedArgs* outEventArgs) {
+ ASSERT_NO_FATAL_FAILURE(
+ assertCalled<NotifyInputDevicesChangedArgs>(outEventArgs,
+ "Expected notifyInputDevicesChanged() "
+ "to have been called."));
+}
+
void TestInputListener::assertNotifyConfigurationChangedWasCalled(
NotifyConfigurationChangedArgs* outEventArgs) {
ASSERT_NO_FATAL_FAILURE(
@@ -168,6 +176,10 @@
mCondition.notify_all();
}
+void TestInputListener::notifyInputDevicesChanged(const NotifyInputDevicesChangedArgs& args) {
+ addToQueue<NotifyInputDevicesChangedArgs>(&args);
+}
+
void TestInputListener::notifyConfigurationChanged(const NotifyConfigurationChangedArgs* args) {
addToQueue<NotifyConfigurationChangedArgs>(args);
}
diff --git a/services/inputflinger/tests/TestInputListener.h b/services/inputflinger/tests/TestInputListener.h
index 9665f70..da2cab3 100644
--- a/services/inputflinger/tests/TestInputListener.h
+++ b/services/inputflinger/tests/TestInputListener.h
@@ -35,6 +35,9 @@
using TimePoint = std::chrono::time_point<std::chrono::system_clock>;
+ void assertNotifyInputDevicesChangedWasCalled(
+ NotifyInputDevicesChangedArgs* outEventArgs = nullptr);
+
void assertNotifyConfigurationChangedWasCalled(
NotifyConfigurationChangedArgs* outEventArgs = nullptr);
@@ -76,6 +79,8 @@
template <class NotifyArgsType>
void addToQueue(const NotifyArgsType* args);
+ virtual void notifyInputDevicesChanged(const NotifyInputDevicesChangedArgs& args) override;
+
virtual void notifyConfigurationChanged(const NotifyConfigurationChangedArgs* args) override;
virtual void notifyDeviceReset(const NotifyDeviceResetArgs* args) override;
@@ -97,7 +102,8 @@
const std::chrono::milliseconds mEventHappenedTimeout;
const std::chrono::milliseconds mEventDidNotHappenTimeout;
- std::tuple<std::vector<NotifyConfigurationChangedArgs>, //
+ std::tuple<std::vector<NotifyInputDevicesChangedArgs>, //
+ std::vector<NotifyConfigurationChangedArgs>, //
std::vector<NotifyDeviceResetArgs>, //
std::vector<NotifyKeyArgs>, //
std::vector<NotifyMotionArgs>, //
diff --git a/services/inputflinger/tests/UnwantedInteractionBlocker_test.cpp b/services/inputflinger/tests/UnwantedInteractionBlocker_test.cpp
index 2a9ace0..be731b1 100644
--- a/services/inputflinger/tests/UnwantedInteractionBlocker_test.cpp
+++ b/services/inputflinger/tests/UnwantedInteractionBlocker_test.cpp
@@ -492,7 +492,7 @@
*/
TEST_F(UnwantedInteractionBlockerTest, NoCrashWhenResetHappens) {
NotifyMotionArgs args;
- mBlocker->notifyInputDevicesChanged({generateTestDeviceInfo()});
+ mBlocker->notifyInputDevicesChanged({/*id=*/0, {generateTestDeviceInfo()}});
mBlocker->notifyMotion(
&(args = generateMotionArgs(/*downTime=*/0, /*eventTime=*/1, DOWN, {{1, 2, 3}})));
mBlocker->notifyMotion(
@@ -505,7 +505,7 @@
}
TEST_F(UnwantedInteractionBlockerTest, NoCrashWhenStylusSourceWithFingerToolIsReceived) {
- mBlocker->notifyInputDevicesChanged({generateTestDeviceInfo()});
+ mBlocker->notifyInputDevicesChanged({/*id=*/0, {generateTestDeviceInfo()}});
NotifyMotionArgs args = generateMotionArgs(/*downTime=*/0, /*eventTime=*/1, DOWN, {{1, 2, 3}});
args.pointerProperties[0].toolType = ToolType::FINGER;
args.source = AINPUT_SOURCE_STYLUS;
@@ -518,14 +518,14 @@
*/
TEST_F(UnwantedInteractionBlockerTest, NoResetIfDeviceInfoChanges) {
NotifyMotionArgs args;
- mBlocker->notifyInputDevicesChanged({generateTestDeviceInfo()});
+ mBlocker->notifyInputDevicesChanged({/*id=*/0, {generateTestDeviceInfo()}});
mBlocker->notifyMotion(
&(args = generateMotionArgs(/*downTime=*/0, /*eventTime=*/1, DOWN, {{1, 2, 3}})));
mBlocker->notifyMotion(
&(args = generateMotionArgs(/*downTime=*/0, /*eventTime=*/2, MOVE, {{4, 5, 6}})));
// Now pretend the device changed, even though nothing is different for DEVICE_ID in practice.
- mBlocker->notifyInputDevicesChanged({generateTestDeviceInfo()});
+ mBlocker->notifyInputDevicesChanged({/*id=*/0, {generateTestDeviceInfo()}});
// The MOVE event continues the gesture that started before 'devices changed', so it should not
// cause a crash.
@@ -538,7 +538,7 @@
*/
TEST_F(UnwantedInteractionBlockerTest, StylusAfterTouchWorks) {
NotifyMotionArgs args;
- mBlocker->notifyInputDevicesChanged({generateTestDeviceInfo()});
+ mBlocker->notifyInputDevicesChanged({/*id=*/0, {generateTestDeviceInfo()}});
args = generateMotionArgs(/*downTime=*/0, /*eventTime=*/0, DOWN, {{1, 2, 3}});
mBlocker->notifyMotion(&args);
args = generateMotionArgs(/*downTime=*/0, /*eventTime=*/1, MOVE, {{4, 5, 6}});
@@ -568,7 +568,7 @@
* options
*/
TEST_F(UnwantedInteractionBlockerTest, DumpCanBeAccessedOnAnotherThread) {
- mBlocker->notifyInputDevicesChanged({generateTestDeviceInfo()});
+ mBlocker->notifyInputDevicesChanged({/*id=*/0, {generateTestDeviceInfo()}});
NotifyMotionArgs args1 = generateMotionArgs(/*downTime=*/0, /*eventTime=*/0, DOWN, {{1, 2, 3}});
mBlocker->notifyMotion(&args1);
std::thread dumpThread([this]() {
@@ -587,7 +587,7 @@
* of the touch is large. This is an integration test that checks that this filter kicks in.
*/
TEST_F(UnwantedInteractionBlockerTest, HeuristicFilterWorks) {
- mBlocker->notifyInputDevicesChanged({generateTestDeviceInfo()});
+ mBlocker->notifyInputDevicesChanged({/*id=*/0, {generateTestDeviceInfo()}});
// Small touch down
NotifyMotionArgs args1 = generateMotionArgs(/*downTime=*/0, /*eventTime=*/0, DOWN, {{1, 2, 3}});
mBlocker->notifyMotion(&args1);
@@ -613,9 +613,9 @@
* This is similar to `HeuristicFilterWorks` test, but for stylus tool.
*/
TEST_F(UnwantedInteractionBlockerTest, StylusIsNotBlocked) {
- InputDeviceInfo info = generateTestDeviceInfo();
- info.addSource(AINPUT_SOURCE_STYLUS);
- mBlocker->notifyInputDevicesChanged({info});
+ NotifyInputDevicesChangedArgs deviceChangedArgs = {/*id=*/0, {generateTestDeviceInfo()}};
+ deviceChangedArgs.inputDeviceInfos[0].addSource(AINPUT_SOURCE_STYLUS);
+ mBlocker->notifyInputDevicesChanged(deviceChangedArgs);
NotifyMotionArgs args1 = generateMotionArgs(/*downTime=*/0, /*eventTime=*/0, DOWN, {{1, 2, 3}});
args1.pointerProperties[0].toolType = ToolType::STYLUS;
mBlocker->notifyMotion(&args1);
@@ -643,9 +643,9 @@
* Stylus event should continue to work even after touch is detected as a palm.
*/
TEST_F(UnwantedInteractionBlockerTest, TouchIsBlockedWhenMixedWithStylus) {
- InputDeviceInfo info = generateTestDeviceInfo();
- info.addSource(AINPUT_SOURCE_STYLUS);
- mBlocker->notifyInputDevicesChanged({info});
+ NotifyInputDevicesChangedArgs deviceChangedArgs = {/*id=*/0, {generateTestDeviceInfo()}};
+ deviceChangedArgs.inputDeviceInfos[0].addSource(AINPUT_SOURCE_STYLUS);
+ mBlocker->notifyInputDevicesChanged(deviceChangedArgs);
// Touch down
NotifyMotionArgs args1 = generateMotionArgs(/*downTime=*/0, /*eventTime=*/0, DOWN, {{1, 2, 3}});
@@ -699,7 +699,7 @@
TEST_F(UnwantedInteractionBlockerTestDeathTest, InconsistentEventAfterResetCausesACrash) {
ScopedSilentDeath _silentDeath;
NotifyMotionArgs args;
- mBlocker->notifyInputDevicesChanged({generateTestDeviceInfo()});
+ mBlocker->notifyInputDevicesChanged({/*id=*/0, {generateTestDeviceInfo()}});
mBlocker->notifyMotion(
&(args = generateMotionArgs(/*downTime=*/0, /*eventTime=*/1, DOWN, {{1, 2, 3}})));
mBlocker->notifyMotion(
@@ -721,7 +721,7 @@
TEST_F(UnwantedInteractionBlockerTestDeathTest, WhenMoveWithoutDownCausesACrash) {
ScopedSilentDeath _silentDeath;
NotifyMotionArgs args = generateMotionArgs(/*downTime=*/0, /*eventTime=*/1, MOVE, {{1, 2, 3}});
- mBlocker->notifyInputDevicesChanged({generateTestDeviceInfo()});
+ mBlocker->notifyInputDevicesChanged({/*id=*/0, {generateTestDeviceInfo()}});
ASSERT_DEATH({ mBlocker->notifyMotion(&args); }, "Could not find slot");
}
diff --git a/services/inputflinger/tests/fuzzers/MapperHelpers.h b/services/inputflinger/tests/fuzzers/MapperHelpers.h
index 9f4aa5c..0dc627a 100644
--- a/services/inputflinger/tests/fuzzers/MapperHelpers.h
+++ b/services/inputflinger/tests/fuzzers/MapperHelpers.h
@@ -293,6 +293,7 @@
class FuzzInputListener : public virtual InputListenerInterface {
public:
+ void notifyInputDevicesChanged(const NotifyInputDevicesChangedArgs& args) override {}
void notifyConfigurationChanged(const NotifyConfigurationChangedArgs* args) override {}
void notifyKey(const NotifyKeyArgs* args) override {}
void notifyMotion(const NotifyMotionArgs* args) override {}
diff --git a/services/sensorservice/BatteryService.cpp b/services/sensorservice/BatteryService.cpp
index 94de55c..b0fbe5d 100644
--- a/services/sensorservice/BatteryService.cpp
+++ b/services/sensorservice/BatteryService.cpp
@@ -74,6 +74,14 @@
}
}
+void BatteryService::noteWakeupSensorEventImpl(int64_t elapsedNanos, uid_t uid, int handle) {
+ if (checkService()) {
+ int64_t identity = IPCThreadState::self()->clearCallingIdentity();
+ mBatteryStatService->noteWakeupSensorEvent(elapsedNanos, uid, handle);
+ IPCThreadState::self()->restoreCallingIdentity(identity);
+ }
+}
+
bool BatteryService::checkService() {
if (mBatteryStatService == nullptr) {
const sp<IServiceManager> sm(defaultServiceManager());
diff --git a/services/sensorservice/BatteryService.h b/services/sensorservice/BatteryService.h
index 13fc58a..60ef03f 100644
--- a/services/sensorservice/BatteryService.h
+++ b/services/sensorservice/BatteryService.h
@@ -19,11 +19,14 @@
#include <batterystats/IBatteryStats.h>
#include <utils/Singleton.h>
+#include <utils/SortedVector.h>
+#include <utils/SystemClock.h>
namespace android {
// ---------------------------------------------------------------------------
class BatteryService : public Singleton<BatteryService> {
+ static constexpr int64_t WAKEUP_SENSOR_EVENT_DEBOUNCE_MS = 1000;
friend class Singleton<BatteryService>;
sp<IBatteryStats> mBatteryStatService;
@@ -32,6 +35,7 @@
void enableSensorImpl(uid_t uid, int handle);
void disableSensorImpl(uid_t uid, int handle);
+ void noteWakeupSensorEventImpl(int64_t elapsedNanos, uid_t uid, int handle);
struct Info {
uid_t uid;
@@ -44,6 +48,7 @@
}
};
+ int64_t mLastWakeupSensorEventReportedMs;
Mutex mActivationsLock;
SortedVector<Info> mActivations;
bool addSensor(uid_t uid, int handle);
@@ -57,6 +62,15 @@
static void disableSensor(uid_t uid, int handle) {
BatteryService::getInstance().disableSensorImpl(uid, handle);
}
+ static void noteWakeupSensorEvent(int64_t elapsed, uid_t uid, int handle) {
+ BatteryService& instance = BatteryService::getInstance();
+ const int64_t nowElapsedMs = elapsedRealtime();
+ if (nowElapsedMs >= (instance.mLastWakeupSensorEventReportedMs
+ + WAKEUP_SENSOR_EVENT_DEBOUNCE_MS)) {
+ instance.noteWakeupSensorEventImpl(elapsed, uid, handle);
+ instance.mLastWakeupSensorEventReportedMs = nowElapsedMs;
+ }
+ }
};
// ---------------------------------------------------------------------------
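The noteWakeupSensorEvent() path added above rate-limits battery-stats reporting to at most one wakeup-sensor event per WAKEUP_SENSOR_EVENT_DEBOUNCE_MS of elapsed realtime. A minimal standalone sketch of that debounce rule follows; the names here are illustrative stand-ins, not the BatteryService members, and the initial value is chosen only so the first event is reported.

#include <cstdint>

constexpr int64_t kDebounceMs = 1000;  // mirrors WAKEUP_SENSOR_EVENT_DEBOUNCE_MS

struct WakeupEventDebouncer {
    // Start far enough in the past that the first event is always reported.
    int64_t lastReportedMs = -kDebounceMs;

    // Returns true if an event observed at 'nowElapsedMs' should be reported.
    bool shouldReport(int64_t nowElapsedMs) {
        if (nowElapsedMs >= lastReportedMs + kDebounceMs) {
            lastReportedMs = nowElapsedMs;
            return true;
        }
        return false;
    }
};

// Example: events at 0 ms, 400 ms and 1200 ms are reported at 0 ms and 1200 ms only.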
diff --git a/services/sensorservice/SensorEventConnection.cpp b/services/sensorservice/SensorEventConnection.cpp
index 7a6b31d..dc5070c 100644
--- a/services/sensorservice/SensorEventConnection.cpp
+++ b/services/sensorservice/SensorEventConnection.cpp
@@ -23,6 +23,7 @@
#include <sensor/SensorEventQueue.h>
#include "vec.h"
+#include "BatteryService.h"
#include "SensorEventConnection.h"
#include "SensorDevice.h"
@@ -391,6 +392,8 @@
if (hasSensorAccess()) {
index_wake_up_event = findWakeUpSensorEventLocked(scratch, count);
if (index_wake_up_event >= 0) {
+ BatteryService::noteWakeupSensorEvent(scratch[index_wake_up_event].timestamp,
+ mUid, scratch[index_wake_up_event].sensor);
scratch[index_wake_up_event].flags |= WAKE_UP_SENSOR_EVENT_NEEDS_ACK;
++mWakeLockRefCount;
#if DEBUG_CONNECTIONS
diff --git a/services/surfaceflinger/Scheduler/Scheduler.cpp b/services/surfaceflinger/Scheduler/Scheduler.cpp
index 3e12db6..8ddcfa1 100644
--- a/services/surfaceflinger/Scheduler/Scheduler.cpp
+++ b/services/surfaceflinger/Scheduler/Scheduler.cpp
@@ -130,7 +130,7 @@
pacesetterVsyncSchedule = promotePacesetterDisplayLocked();
}
- applyNewVsyncSchedule(std::move(pacesetterVsyncSchedule));
+ applyNewVsyncScheduleIfNonNull(std::move(pacesetterVsyncSchedule));
}
void Scheduler::unregisterDisplay(PhysicalDisplayId displayId) {
@@ -149,7 +149,7 @@
pacesetterVsyncSchedule = promotePacesetterDisplayLocked();
}
- applyNewVsyncSchedule(std::move(pacesetterVsyncSchedule));
+ applyNewVsyncScheduleIfNonNull(std::move(pacesetterVsyncSchedule));
}
void Scheduler::run() {
@@ -693,16 +693,17 @@
pacesetterVsyncSchedule = promotePacesetterDisplayLocked(pacesetterIdOpt);
}
- applyNewVsyncSchedule(std::move(pacesetterVsyncSchedule));
+ applyNewVsyncScheduleIfNonNull(std::move(pacesetterVsyncSchedule));
}
std::shared_ptr<VsyncSchedule> Scheduler::promotePacesetterDisplayLocked(
std::optional<PhysicalDisplayId> pacesetterIdOpt) {
// TODO(b/241286431): Choose the pacesetter display.
+ const auto oldPacesetterDisplayIdOpt = mPacesetterDisplayId;
mPacesetterDisplayId = pacesetterIdOpt.value_or(mRefreshRateSelectors.begin()->first);
ALOGI("Display %s is the pacesetter", to_string(*mPacesetterDisplayId).c_str());
- auto vsyncSchedule = getVsyncScheduleLocked(*mPacesetterDisplayId);
+ auto newVsyncSchedule = getVsyncScheduleLocked(*mPacesetterDisplayId);
if (const auto pacesetterPtr = pacesetterSelectorPtrLocked()) {
pacesetterPtr->setIdleTimerCallbacks(
{.platform = {.onReset = [this] { idleTimerCallback(TimerState::Reset); },
@@ -713,15 +714,28 @@
pacesetterPtr->startIdleTimer();
+ // Track the new period, which may have changed due to switching to a
+ // new pacesetter or due to a hotplug event. In the former case, this
+ // is important so that VSYNC modulation does not get stuck in the
+ // initiated state if a transition started on the old pacesetter.
const Fps refreshRate = pacesetterPtr->getActiveMode().modePtr->getFps();
- vsyncSchedule->startPeriodTransition(mSchedulerCallback, refreshRate.getPeriod(),
- true /* force */);
+ newVsyncSchedule->startPeriodTransition(mSchedulerCallback, refreshRate.getPeriod(),
+ true /* force */);
}
- return vsyncSchedule;
+ if (oldPacesetterDisplayIdOpt == mPacesetterDisplayId) {
+ return nullptr;
+ }
+ return newVsyncSchedule;
}
-void Scheduler::applyNewVsyncSchedule(std::shared_ptr<VsyncSchedule> vsyncSchedule) {
- onNewVsyncSchedule(vsyncSchedule->getDispatch());
+void Scheduler::applyNewVsyncScheduleIfNonNull(
+ std::shared_ptr<VsyncSchedule> pacesetterSchedulePtr) {
+ if (!pacesetterSchedulePtr) {
+ // The pacesetter has not changed, so there is no new VsyncSchedule to
+ // apply.
+ return;
+ }
+ onNewVsyncSchedule(pacesetterSchedulePtr->getDispatch());
std::vector<android::EventThread*> threads;
{
std::lock_guard<std::mutex> lock(mConnectionsLock);
@@ -731,7 +745,7 @@
}
}
for (auto* thread : threads) {
- thread->onNewVsyncSchedule(vsyncSchedule);
+ thread->onNewVsyncSchedule(pacesetterSchedulePtr);
}
}
diff --git a/services/surfaceflinger/Scheduler/Scheduler.h b/services/surfaceflinger/Scheduler/Scheduler.h
index 3423652..720a1cb 100644
--- a/services/surfaceflinger/Scheduler/Scheduler.h
+++ b/services/surfaceflinger/Scheduler/Scheduler.h
@@ -329,10 +329,12 @@
// MessageQueue and EventThread need to use the new pacesetter's
// VsyncSchedule, and this must happen while mDisplayLock is *not* locked,
// or else we may deadlock with EventThread.
+ // Returns the new pacesetter's VsyncSchedule, or null if the pacesetter is
+ // unchanged.
std::shared_ptr<VsyncSchedule> promotePacesetterDisplayLocked(
std::optional<PhysicalDisplayId> pacesetterIdOpt = std::nullopt)
REQUIRES(kMainThreadContext, mDisplayLock);
- void applyNewVsyncSchedule(std::shared_ptr<VsyncSchedule>) EXCLUDES(mDisplayLock);
+ void applyNewVsyncScheduleIfNonNull(std::shared_ptr<VsyncSchedule>) EXCLUDES(mDisplayLock);
// Blocks until the pacesetter's idle timer thread exits. `mDisplayLock` must not be locked by
// the caller on the main thread to avoid deadlock, since the timer thread locks it before exit.
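The promotePacesetterDisplayLocked()/applyNewVsyncScheduleIfNonNull() split above follows a compute-under-lock, apply-outside-lock pattern, where a null return means the pacesetter is unchanged. A self-contained sketch of that pattern with stand-in types (not the real Scheduler code) is shown below.

#include <memory>
#include <mutex>
#include <optional>

struct VsyncScheduleStub {};  // stand-in for the real VsyncSchedule

class PacesetterExample {
public:
    void onDisplayTopologyChanged(int candidateDisplayId) {
        std::shared_ptr<VsyncScheduleStub> newSchedule;
        {
            std::lock_guard<std::mutex> lock(mDisplayLock);
            newSchedule = promotePacesetterLocked(candidateDisplayId);
        }
        // Apply outside the lock to avoid deadlocking with consumers that also
        // take mDisplayLock (e.g. the event threads in the real Scheduler).
        applyIfNonNull(std::move(newSchedule));
    }

private:
    // Returns the new pacesetter's schedule, or nullptr if the pacesetter is unchanged.
    std::shared_ptr<VsyncScheduleStub> promotePacesetterLocked(int candidateDisplayId) {
        if (mPacesetterId == candidateDisplayId) return nullptr;
        mPacesetterId = candidateDisplayId;
        return std::make_shared<VsyncScheduleStub>();
    }

    void applyIfNonNull(std::shared_ptr<VsyncScheduleStub> schedule) {
        if (!schedule) return;  // pacesetter unchanged: nothing to propagate
        mActiveSchedule = std::move(schedule);
        // ...notify downstream consumers of the new schedule here...
    }

    std::mutex mDisplayLock;
    std::optional<int> mPacesetterId;
    std::shared_ptr<VsyncScheduleStub> mActiveSchedule;
};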
diff --git a/services/surfaceflinger/tests/unittests/SchedulerTest.cpp b/services/surfaceflinger/tests/unittests/SchedulerTest.cpp
index dc76b4c..0c43831 100644
--- a/services/surfaceflinger/tests/unittests/SchedulerTest.cpp
+++ b/services/surfaceflinger/tests/unittests/SchedulerTest.cpp
@@ -384,4 +384,23 @@
}
}
+TEST_F(SchedulerTest, changingPacesetterChangesVsyncSchedule) {
+ // Add a second display so we can change the pacesetter.
+ mScheduler->registerDisplay(kDisplayId2,
+ std::make_shared<RefreshRateSelector>(kDisplay2Modes,
+ kDisplay2Mode60->getId()));
+ // Ensure that the pacesetter is the one we expect.
+ mScheduler->setPacesetterDisplay(kDisplayId1);
+
+ // Switching to the other will call onNewVsyncSchedule.
+ EXPECT_CALL(*mEventThread, onNewVsyncSchedule(mScheduler->getVsyncSchedule(kDisplayId2)))
+ .Times(1);
+ mScheduler->setPacesetterDisplay(kDisplayId2);
+}
+
+TEST_F(SchedulerTest, promotingSamePacesetterDoesNotChangeVsyncSchedule) {
+ EXPECT_CALL(*mEventThread, onNewVsyncSchedule(_)).Times(0);
+ mScheduler->setPacesetterDisplay(kDisplayId1);
+}
+
} // namespace android::scheduler