libgui: Add unlimited slot support to Surfaces and Consumers

Surfaces can now call `setMaxDequeuedBufferCount` with any value, provided
the consumer supports it, giving them an essentially unlimited number of
buffers to use.

ConsumerBase and its libgui children now allow unlimited buffer slots by
default (so all users opt into this automatically), and their
implementations track the new variable slot limit.
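
A rough producer-side sketch of what this enables (illustrative only: the
consumer type, the usage flag, and the count of 128 are stand-ins, not
part of this change):

  #include <gui/BufferItemConsumer.h>
  #include <gui/BufferQueue.h>
  #include <gui/Surface.h>
  #include <ui/GraphicBuffer.h>

  using namespace android;

  void sketchUnlimitedSlots() {
      sp<IGraphicBufferProducer> producer;
      sp<IGraphicBufferConsumer> consumer;
      BufferQueue::createBufferQueue(&producer, &consumer);

      // BufferItemConsumer is one of the ConsumerBase children that now
      // allows unlimited slots by default.
      sp<BufferItemConsumer> itemConsumer =
              new BufferItemConsumer(consumer, GraphicBuffer::USAGE_SW_READ_OFTEN);

      sp<Surface> surface = new Surface(producer);
      // Counts above the legacy slot limit were previously rejected; with
      // this change they succeed when the consumer allows slot expansion.
      int err = surface->setMaxDequeuedBufferCount(128);
      // err == NO_ERROR only if the queue could expand its slots.
  }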
This is part of go/warren-buffers.
Bug: 341359185
Flag: com.android.graphics.libgui.flags.wb_unlimited_slots
Test: new tests, old tests
Change-Id: I374aa204a2e42a17d95c6e0ffaef2c2caaa9c963
diff --git a/libs/gui/IGraphicBufferProducerFlattenables.cpp b/libs/gui/IGraphicBufferProducerFlattenables.cpp
index 4e92a39..8b2e2dd 100644
--- a/libs/gui/IGraphicBufferProducerFlattenables.cpp
+++ b/libs/gui/IGraphicBufferProducerFlattenables.cpp
@@ -128,7 +128,7 @@
 constexpr size_t IGraphicBufferProducer::QueueBufferOutput::minFlattenedSize() {
     return sizeof(width) + sizeof(height) + sizeof(transformHint) + sizeof(numPendingBuffers) +
             sizeof(nextFrameNumber) + sizeof(bufferReplaced) + sizeof(maxBufferCount) +
-            sizeof(result);
+            sizeof(result) + sizeof(isSlotExpansionAllowed);
 }
 size_t IGraphicBufferProducer::QueueBufferOutput::getFlattenedSize() const {
     return minFlattenedSize() + frameTimestamps.getFlattenedSize();
@@ -152,6 +152,7 @@
     FlattenableUtils::write(buffer, size, nextFrameNumber);
     FlattenableUtils::write(buffer, size, bufferReplaced);
     FlattenableUtils::write(buffer, size, maxBufferCount);
+    FlattenableUtils::write(buffer, size, isSlotExpansionAllowed);
 
     status_t result = frameTimestamps.flatten(buffer, size, fds, count);
     if (result != NO_ERROR) {
@@ -175,6 +176,7 @@
     FlattenableUtils::read(buffer, size, nextFrameNumber);
     FlattenableUtils::read(buffer, size, bufferReplaced);
     FlattenableUtils::read(buffer, size, maxBufferCount);
+    FlattenableUtils::read(buffer, size, isSlotExpansionAllowed);
 
     status_t result = frameTimestamps.unflatten(buffer, size, fds, count);
     if (result != NO_ERROR) {
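
Note on the pattern above: QueueBufferOutput crosses binder as a
flattenable, so minFlattenedSize(), flatten(), and unflatten() must change
in lockstep whenever a field such as isSlotExpansionAllowed is appended. A
minimal standalone sketch of that round-trip discipline (ExampleOutput is
a made-up type, not the real struct):

  #include <cstdint>
  #include <utils/Errors.h>
  #include <utils/Flattenable.h>

  // Toy stand-in for QueueBufferOutput: every field written in flatten()
  // must be counted in getFlattenedSize() and consumed in unflatten() in
  // the same order, or the reader walks off the wire format.
  struct ExampleOutput : public android::LightFlattenable<ExampleOutput> {
      uint32_t maxBufferCount = 0;
      bool isSlotExpansionAllowed = false;  // the newly appended field

      bool isFixedSize() const { return true; }
      size_t getFlattenedSize() const {
          return sizeof(maxBufferCount) + sizeof(isSlotExpansionAllowed);
      }
      android::status_t flatten(void* buffer, size_t size) const {
          if (size < getFlattenedSize()) return android::NO_MEMORY;
          android::FlattenableUtils::write(buffer, size, maxBufferCount);
          android::FlattenableUtils::write(buffer, size, isSlotExpansionAllowed);
          return android::NO_ERROR;
      }
      android::status_t unflatten(void const* buffer, size_t size) {
          if (size < getFlattenedSize()) return android::NO_MEMORY;
          android::FlattenableUtils::read(buffer, size, maxBufferCount);
          android::FlattenableUtils::read(buffer, size, isSlotExpansionAllowed);
          return android::NO_ERROR;
      }
  };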