/*
 * Copyright 2018, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "Codec2Buffer"
#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Log.h>
#include <utils/Trace.h>

#include <aidl/android/hardware/graphics/common/Cta861_3.h>
#include <aidl/android/hardware/graphics/common/Smpte2086.h>
#include <android-base/properties.h>
#include <android/hardware/cas/native/1.0/types.h>
#include <android/hardware/drm/1.0/types.h>
#include <android/hardware/graphics/common/1.2/types.h>
#include <android/hardware/graphics/mapper/4.0/IMapper.h>
#include <gralloctypes/Gralloc4.h>
#include <hidlmemory/FrameworkUtils.h>
#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/CodecBase.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <mediadrm/ICrypto.h>
#include <nativebase/nativebase.h>
#include <ui/Fence.h>

#include <C2AllocatorGralloc.h>
#include <C2BlockInternal.h>
#include <C2Debug.h>

#include "Codec2Buffer.h"

namespace android {

// Codec2Buffer

bool Codec2Buffer::canCopyLinear(const std::shared_ptr<C2Buffer> &buffer) const {
    if (const_cast<Codec2Buffer *>(this)->base() == nullptr) {
        return false;
    }
    if (!buffer) {
        // Nothing to copy, so we can copy by doing nothing.
        return true;
    }
    if (buffer->data().type() != C2BufferData::LINEAR) {
        return false;
    }
    if (buffer->data().linearBlocks().size() == 0u) {
        // Nothing to copy, so we can copy by doing nothing.
        return true;
    } else if (buffer->data().linearBlocks().size() > 1u) {
        // We don't know how to copy more than one block.
        return false;
    }
    if (buffer->data().linearBlocks()[0].size() > capacity()) {
        // It won't fit.
        return false;
    }
    return true;
}

bool Codec2Buffer::copyLinear(const std::shared_ptr<C2Buffer> &buffer) {
    // We assume that all canCopyLinear() checks passed.
    if (!buffer || buffer->data().linearBlocks().size() == 0u
            || buffer->data().linearBlocks()[0].size() == 0u) {
        setRange(0, 0);
        return true;
    }
    C2ReadView view = buffer->data().linearBlocks()[0].map().get();
    if (view.error() != C2_OK) {
        ALOGD("Error while mapping: %d", view.error());
        return false;
    }
    if (view.capacity() > capacity()) {
        ALOGD("C2ConstLinearBlock lied --- it actually doesn't fit: view(%u) > this(%zu)",
                view.capacity(), capacity());
        return false;
    }
    memcpy(base(), view.data(), view.capacity());
    setRange(0, view.capacity());
    return true;
}

void Codec2Buffer::setImageData(const sp<ABuffer> &imageData) {
    mImageData = imageData;
}

// LocalLinearBuffer

bool LocalLinearBuffer::canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
    return canCopyLinear(buffer);
}

bool LocalLinearBuffer::copy(const std::shared_ptr<C2Buffer> &buffer) {
    return copyLinear(buffer);
}

// DummyContainerBuffer
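//
// A DummyContainerBuffer does not expose any CPU-accessible payload; it only keeps a
// reference to the underlying C2Buffer and presents a 1-byte placeholder range so the
// client can track the buffer while it is in flight.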
114
Pawin Vongmasa8be93112018-12-11 14:01:42 -0800115static uint8_t sDummyByte[1] = { 0 };
116
Pawin Vongmasa36653902018-11-15 00:10:25 -0800117DummyContainerBuffer::DummyContainerBuffer(
118 const sp<AMessage> &format, const std::shared_ptr<C2Buffer> &buffer)
Pawin Vongmasa8be93112018-12-11 14:01:42 -0800119 : Codec2Buffer(format, new ABuffer(sDummyByte, 1)),
Pawin Vongmasa36653902018-11-15 00:10:25 -0800120 mBufferRef(buffer) {
121 setRange(0, buffer ? 1 : 0);
122}
123
124std::shared_ptr<C2Buffer> DummyContainerBuffer::asC2Buffer() {
Wonsik Kimf9b32122020-04-02 11:30:17 -0700125 return mBufferRef;
126}
127
128void DummyContainerBuffer::clearC2BufferRefs() {
129 mBufferRef.reset();
Pawin Vongmasa36653902018-11-15 00:10:25 -0800130}
131
132bool DummyContainerBuffer::canCopy(const std::shared_ptr<C2Buffer> &) const {
133 return !mBufferRef;
134}
135
136bool DummyContainerBuffer::copy(const std::shared_ptr<C2Buffer> &buffer) {
137 mBufferRef = buffer;
138 setRange(0, mBufferRef ? 1 : 0);
139 return true;
140}
141
142// LinearBlockBuffer
143
144// static
145sp<LinearBlockBuffer> LinearBlockBuffer::Allocate(
146 const sp<AMessage> &format, const std::shared_ptr<C2LinearBlock> &block) {
147 C2WriteView writeView(block->map().get());
148 if (writeView.error() != C2_OK) {
149 return nullptr;
150 }
151 return new LinearBlockBuffer(format, std::move(writeView), block);
152}
153
154std::shared_ptr<C2Buffer> LinearBlockBuffer::asC2Buffer() {
155 return C2Buffer::CreateLinearBuffer(mBlock->share(offset(), size(), C2Fence()));
156}
157
158bool LinearBlockBuffer::canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
159 return canCopyLinear(buffer);
160}
161
162bool LinearBlockBuffer::copy(const std::shared_ptr<C2Buffer> &buffer) {
163 return copyLinear(buffer);
164}
165
166LinearBlockBuffer::LinearBlockBuffer(
167 const sp<AMessage> &format,
168 C2WriteView&& writeView,
169 const std::shared_ptr<C2LinearBlock> &block)
170 : Codec2Buffer(format, new ABuffer(writeView.data(), writeView.size())),
171 mWriteView(writeView),
172 mBlock(block) {
173}
174
175// ConstLinearBlockBuffer
176
177// static
178sp<ConstLinearBlockBuffer> ConstLinearBlockBuffer::Allocate(
179 const sp<AMessage> &format, const std::shared_ptr<C2Buffer> &buffer) {
180 if (!buffer
181 || buffer->data().type() != C2BufferData::LINEAR
182 || buffer->data().linearBlocks().size() != 1u) {
183 return nullptr;
184 }
185 C2ReadView readView(buffer->data().linearBlocks()[0].map().get());
186 if (readView.error() != C2_OK) {
187 return nullptr;
188 }
189 return new ConstLinearBlockBuffer(format, std::move(readView), buffer);
190}
191
192ConstLinearBlockBuffer::ConstLinearBlockBuffer(
193 const sp<AMessage> &format,
194 C2ReadView&& readView,
195 const std::shared_ptr<C2Buffer> &buffer)
    : Codec2Buffer(format, new ABuffer(
            // NOTE: ABuffer only takes a non-const pointer, but this data is
            // supposed to be read-only.
            const_cast<uint8_t *>(readView.data()), readView.capacity())),
      mReadView(readView),
      mBufferRef(buffer) {
}

std::shared_ptr<C2Buffer> ConstLinearBlockBuffer::asC2Buffer() {
    return mBufferRef;
}

void ConstLinearBlockBuffer::clearC2BufferRefs() {
    mBufferRef.reset();
}

// GraphicView2MediaImageConverter

namespace {

class GraphicView2MediaImageConverter {
public:
    /**
     * Creates a C2GraphicView <=> MediaImage converter
     *
     * \param view   C2GraphicView object
     * \param format buffer format
     * \param copy   whether the converter is used for copying or not
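     *
     * When \p copy is false, the converter first tries to wrap the mapped planes directly
     * in a MediaImage2 (no pixel copy); when that is not possible, or when \p copy is true,
     * the caller must attach a back buffer and use copyToMediaImage() to copy the pixels.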
Pawin Vongmasa36653902018-11-15 00:10:25 -0800224 */
225 GraphicView2MediaImageConverter(
Wonsik Kimd79ee1f2020-08-27 17:41:56 -0700226 const C2GraphicView &view, const sp<AMessage> &format, bool copy)
Pawin Vongmasa36653902018-11-15 00:10:25 -0800227 : mInitCheck(NO_INIT),
228 mView(view),
229 mWidth(view.width()),
230 mHeight(view.height()),
Pawin Vongmasa36653902018-11-15 00:10:25 -0800231 mAllocatedDepth(0),
232 mBackBufferSize(0),
233 mMediaImage(new ABuffer(sizeof(MediaImage2))) {
My Name6bd9a7d2022-03-25 12:37:58 -0700234 ATRACE_CALL();
Wonsik Kimd79ee1f2020-08-27 17:41:56 -0700235 if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
236 mClientColorFormat = COLOR_FormatYUV420Flexible;
237 }
238 if (!format->findInt32("android._color-format", &mComponentColorFormat)) {
239 mComponentColorFormat = COLOR_FormatYUV420Flexible;
240 }
Pawin Vongmasa36653902018-11-15 00:10:25 -0800241 if (view.error() != C2_OK) {
242 ALOGD("Converter: view.error() = %d", view.error());
243 mInitCheck = BAD_VALUE;
244 return;
245 }
246 MediaImage2 *mediaImage = (MediaImage2 *)mMediaImage->base();
247 const C2PlanarLayout &layout = view.layout();
248 if (layout.numPlanes == 0) {
249 ALOGD("Converter: 0 planes");
250 mInitCheck = BAD_VALUE;
251 return;
252 }
Harish Mahendrakarcac53852019-02-20 10:59:10 -0800253 memset(mediaImage, 0, sizeof(*mediaImage));
Pawin Vongmasa36653902018-11-15 00:10:25 -0800254 mAllocatedDepth = layout.planes[0].allocatedDepth;
255 uint32_t bitDepth = layout.planes[0].bitDepth;
256
257 // align width and height to support subsampling cleanly
Wonsik Kim8bfa17a2019-05-30 22:12:30 -0700258 uint32_t stride = align(view.crop().width, 2) * divUp(layout.planes[0].allocatedDepth, 8u);
259 uint32_t vStride = align(view.crop().height, 2);
Pawin Vongmasa36653902018-11-15 00:10:25 -0800260
Wonsik Kimd79ee1f2020-08-27 17:41:56 -0700261 bool tryWrapping = !copy;
262
Pawin Vongmasa36653902018-11-15 00:10:25 -0800263 switch (layout.type) {
Wonsik Kimd79ee1f2020-08-27 17:41:56 -0700264 case C2PlanarLayout::TYPE_YUV: {
Pawin Vongmasa36653902018-11-15 00:10:25 -0800265 mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
266 if (layout.numPlanes != 3) {
267 ALOGD("Converter: %d planes for YUV layout", layout.numPlanes);
268 mInitCheck = BAD_VALUE;
269 return;
270 }
Wonsik Kimd79ee1f2020-08-27 17:41:56 -0700271 C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
272 C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
273 C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
274 if (yPlane.channel != C2PlaneInfo::CHANNEL_Y
275 || uPlane.channel != C2PlaneInfo::CHANNEL_CB
276 || vPlane.channel != C2PlaneInfo::CHANNEL_CR) {
277 ALOGD("Converter: not YUV layout");
Pawin Vongmasa36653902018-11-15 00:10:25 -0800278 mInitCheck = BAD_VALUE;
279 return;
280 }
Wonsik Kimd79ee1f2020-08-27 17:41:56 -0700281 bool yuv420888 = yPlane.rowSampling == 1 && yPlane.colSampling == 1
282 && uPlane.rowSampling == 2 && uPlane.colSampling == 2
283 && vPlane.rowSampling == 2 && vPlane.colSampling == 2;
284 if (yuv420888) {
285 for (uint32_t i = 0; i < 3; ++i) {
286 const C2PlaneInfo &plane = layout.planes[i];
287 if (plane.allocatedDepth != 8 || plane.bitDepth != 8) {
288 yuv420888 = false;
289 break;
Pawin Vongmasa36653902018-11-15 00:10:25 -0800290 }
Wonsik Kimd79ee1f2020-08-27 17:41:56 -0700291 }
292 yuv420888 = yuv420888 && yPlane.colInc == 1 && uPlane.rowInc == vPlane.rowInc;
293 }
294 int32_t copyFormat = mClientColorFormat;
295 if (yuv420888 && mClientColorFormat == COLOR_FormatYUV420Flexible) {
296 if (uPlane.colInc == 2 && vPlane.colInc == 2
297 && yPlane.rowInc == uPlane.rowInc) {
298 copyFormat = COLOR_FormatYUV420PackedSemiPlanar;
299 } else if (uPlane.colInc == 1 && vPlane.colInc == 1
300 && yPlane.rowInc == uPlane.rowInc * 2) {
301 copyFormat = COLOR_FormatYUV420PackedPlanar;
302 }
303 }
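                // For a flexible client format, the deduced copyFormat above reflects how the
                // component actually packed the 4:2:0 planes (planar vs. semi-planar), so the
                // MediaImage2 plane layout chosen below matches the copy destination.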
                ALOGV("client_fmt=0x%x y:{colInc=%d rowInc=%d} u:{colInc=%d rowInc=%d} "
                        "v:{colInc=%d rowInc=%d}",
                        mClientColorFormat,
                        yPlane.colInc, yPlane.rowInc,
                        uPlane.colInc, uPlane.rowInc,
                        vPlane.colInc, vPlane.rowInc);
                switch (copyFormat) {
                    case COLOR_FormatYUV420Flexible:
                    case COLOR_FormatYUV420Planar:
                    case COLOR_FormatYUV420PackedPlanar:
                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                        mediaImage->mPlane[mediaImage->Y].mColInc = 1;
                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;

                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
                        mediaImage->mPlane[mediaImage->U].mColInc = 1;
                        mediaImage->mPlane[mediaImage->U].mRowInc = stride / 2;
                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;

                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 5 / 4;
                        mediaImage->mPlane[mediaImage->V].mColInc = 1;
                        mediaImage->mPlane[mediaImage->V].mRowInc = stride / 2;
                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;

                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
                            tryWrapping = yuv420888 && uPlane.colInc == 1 && vPlane.colInc == 1
                                    && yPlane.rowInc == uPlane.rowInc * 2
                                    && view.data()[0] < view.data()[1]
                                    && view.data()[1] < view.data()[2];
                        }
                        break;

                    case COLOR_FormatYUV420SemiPlanar:
                    case COLOR_FormatYUV420PackedSemiPlanar:
                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                        mediaImage->mPlane[mediaImage->Y].mColInc = 1;
                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;

                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
                        mediaImage->mPlane[mediaImage->U].mColInc = 2;
                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;

                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 1;
                        mediaImage->mPlane[mediaImage->V].mColInc = 2;
                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;

                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
                            tryWrapping = yuv420888 && uPlane.colInc == 2 && vPlane.colInc == 2
                                    && yPlane.rowInc == uPlane.rowInc
                                    && view.data()[0] < view.data()[1]
                                    && view.data()[1] < view.data()[2];
                        }
                        break;

                    case COLOR_FormatYUVP010:
                        // stride is in bytes
                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                        mediaImage->mPlane[mediaImage->Y].mColInc = 2;
                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;

                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
                        mediaImage->mPlane[mediaImage->U].mColInc = 4;
                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;

                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
                        mediaImage->mPlane[mediaImage->V].mColInc = 4;
                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
                        if (tryWrapping) {
                            tryWrapping = yPlane.allocatedDepth == 16
                                    && uPlane.allocatedDepth == 16
                                    && vPlane.allocatedDepth == 16
                                    && yPlane.bitDepth == 10
                                    && uPlane.bitDepth == 10
                                    && vPlane.bitDepth == 10
                                    && yPlane.rightShift == 6
                                    && uPlane.rightShift == 6
                                    && vPlane.rightShift == 6
                                    && yPlane.rowSampling == 1 && yPlane.colSampling == 1
                                    && uPlane.rowSampling == 2 && uPlane.colSampling == 2
                                    && vPlane.rowSampling == 2 && vPlane.colSampling == 2
                                    && yPlane.colInc == 2
                                    && uPlane.colInc == 4
                                    && vPlane.colInc == 4
                                    && yPlane.rowInc == uPlane.rowInc
                                    && yPlane.rowInc == vPlane.rowInc;
                        }
                        break;

                    default: {
                        // default to fully planar format --- this will be overridden if wrapping
                        // TODO: keep interleaved format
                        int32_t colInc = divUp(mAllocatedDepth, 8u);
                        int32_t rowInc = stride * colInc / yPlane.colSampling;
                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                        mediaImage->mPlane[mediaImage->Y].mColInc = colInc;
                        mediaImage->mPlane[mediaImage->Y].mRowInc = rowInc;
                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = yPlane.colSampling;
                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = yPlane.rowSampling;
                        int32_t offset = rowInc * vStride / yPlane.rowSampling;

                        rowInc = stride * colInc / uPlane.colSampling;
                        mediaImage->mPlane[mediaImage->U].mOffset = offset;
                        mediaImage->mPlane[mediaImage->U].mColInc = colInc;
                        mediaImage->mPlane[mediaImage->U].mRowInc = rowInc;
                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = uPlane.colSampling;
                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = uPlane.rowSampling;
                        offset += rowInc * vStride / uPlane.rowSampling;

                        rowInc = stride * colInc / vPlane.colSampling;
                        mediaImage->mPlane[mediaImage->V].mOffset = offset;
                        mediaImage->mPlane[mediaImage->V].mColInc = colInc;
                        mediaImage->mPlane[mediaImage->V].mRowInc = rowInc;
                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = vPlane.colSampling;
                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = vPlane.rowSampling;
                        break;
                    }
                }
                break;
            }

            case C2PlanarLayout::TYPE_YUVA:
                ALOGD("Converter: unrecognized color format "
                        "(client %d component %d) for YUVA layout",
                        mClientColorFormat, mComponentColorFormat);
                mInitCheck = NO_INIT;
                return;
            case C2PlanarLayout::TYPE_RGB:
                ALOGD("Converter: unrecognized color format "
                        "(client %d component %d) for RGB layout",
                        mClientColorFormat, mComponentColorFormat);
                mInitCheck = NO_INIT;
                // TODO: support MediaImage layout
                return;
            case C2PlanarLayout::TYPE_RGBA:
                ALOGD("Converter: unrecognized color format "
                        "(client %d component %d) for RGBA layout",
                        mClientColorFormat, mComponentColorFormat);
                mInitCheck = NO_INIT;
                // TODO: support MediaImage layout
                return;
            default:
                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
                if (layout.numPlanes == 1) {
                    const C2PlaneInfo &plane = layout.planes[0];
                    if (plane.colInc < 0 || plane.rowInc < 0) {
                        // Copy-only if we have negative colInc/rowInc
                        tryWrapping = false;
                    }
                    mediaImage->mPlane[0].mOffset = 0;
                    mediaImage->mPlane[0].mColInc = std::abs(plane.colInc);
                    mediaImage->mPlane[0].mRowInc = std::abs(plane.rowInc);
                    mediaImage->mPlane[0].mHorizSubsampling = plane.colSampling;
                    mediaImage->mPlane[0].mVertSubsampling = plane.rowSampling;
                } else {
                    ALOGD("Converter: unrecognized layout: color format (client %d component %d)",
                            mClientColorFormat, mComponentColorFormat);
                    mInitCheck = NO_INIT;
                    return;
                }
                break;
        }
        if (tryWrapping) {
            // try to map directly. check if the planes are near one another
            const uint8_t *minPtr = mView.data()[0];
            const uint8_t *maxPtr = mView.data()[0];
            int32_t planeSize = 0;
            for (uint32_t i = 0; i < layout.numPlanes; ++i) {
                const C2PlaneInfo &plane = layout.planes[i];
                int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
                ssize_t minOffset = plane.minOffset(
                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
                ssize_t maxOffset = plane.maxOffset(
                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
                if (minPtr > mView.data()[i] + minOffset) {
                    minPtr = mView.data()[i] + minOffset;
                }
                if (maxPtr < mView.data()[i] + maxOffset) {
                    maxPtr = mView.data()[i] + maxOffset;
                }
                planeSize += planeStride * divUp(mAllocatedDepth, 8u)
                        * align(mHeight, 64) / plane.rowSampling;
            }

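            // Wrap only if plane 0 starts at the lowest mapped address and every plane falls
            // within one contiguous region no larger than the computed plane-size estimate.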
            if (minPtr == mView.data()[0] && (maxPtr - minPtr + 1) <= planeSize) {
                // FIXME: this is risky as reading/writing data out of bounds results
                //        in undefined behavior, but gralloc does assume a
                //        contiguous mapping
                for (uint32_t i = 0; i < layout.numPlanes; ++i) {
                    const C2PlaneInfo &plane = layout.planes[i];
                    mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
                    mediaImage->mPlane[i].mColInc = plane.colInc;
                    mediaImage->mPlane[i].mRowInc = plane.rowInc;
                    mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
                    mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
                }
                mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr),
                                       maxPtr - minPtr + 1);
                ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
            }
        }
        mediaImage->mNumPlanes = layout.numPlanes;
        mediaImage->mWidth = view.crop().width;
        mediaImage->mHeight = view.crop().height;
        mediaImage->mBitDepth = bitDepth;
        mediaImage->mBitDepthAllocated = mAllocatedDepth;

        uint32_t bufferSize = 0;
        for (uint32_t i = 0; i < layout.numPlanes; ++i) {
            const C2PlaneInfo &plane = layout.planes[i];
            if (plane.allocatedDepth < plane.bitDepth
                    || plane.rightShift != plane.allocatedDepth - plane.bitDepth) {
                ALOGD("rightShift value of %u unsupported", plane.rightShift);
                mInitCheck = BAD_VALUE;
                return;
            }
            if (plane.allocatedDepth > 8 && plane.endianness != C2PlaneInfo::NATIVE) {
                ALOGD("endianness value of %u unsupported", plane.endianness);
                mInitCheck = BAD_VALUE;
                return;
            }
            if (plane.allocatedDepth != mAllocatedDepth || plane.bitDepth != bitDepth) {
                ALOGD("different allocatedDepth/bitDepth per plane unsupported");
                mInitCheck = BAD_VALUE;
                return;
            }
            // stride is in bytes
            bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
        }

        mBackBufferSize = bufferSize;
        mInitCheck = OK;
    }

    status_t initCheck() const { return mInitCheck; }

    uint32_t backBufferSize() const { return mBackBufferSize; }

    /**
     * Wrap the C2GraphicView in a MediaImage2. Note that if the view is not wrapped, the content
     * is not mapped in this function --- the caller should use copyToMediaImage() to copy the
     * data into a backing buffer explicitly.
     *
     * \return media buffer. This is null if wrapping failed.
     */
    sp<ABuffer> wrap() const {
        if (mBackBuffer == nullptr) {
            return mWrapped;
        }
        return nullptr;
    }

    bool setBackBuffer(const sp<ABuffer> &backBuffer) {
        if (backBuffer == nullptr) {
            return false;
        }
        if (backBuffer->capacity() < mBackBufferSize) {
            return false;
        }
        backBuffer->setRange(0, mBackBufferSize);
        mBackBuffer = backBuffer;
        return true;
    }

    /**
     * Copy C2GraphicView to MediaImage2.
     */
    status_t copyToMediaImage() {
        ATRACE_CALL();
        if (mInitCheck != OK) {
            return mInitCheck;
        }
        return ImageCopy(mBackBuffer->base(), getMediaImage(), mView);
    }

    const sp<ABuffer> &imageData() const { return mMediaImage; }

private:
    status_t mInitCheck;

    const C2GraphicView mView;
    uint32_t mWidth;
    uint32_t mHeight;
    int32_t mClientColorFormat;  ///< SDK color format for MediaImage
    int32_t mComponentColorFormat;  ///< SDK color format from component
    sp<ABuffer> mWrapped;  ///< wrapped buffer (if we can map C2Buffer to an ABuffer)
    uint32_t mAllocatedDepth;
    uint32_t mBackBufferSize;
    sp<ABuffer> mMediaImage;
    std::function<sp<ABuffer>(size_t)> mAlloc;

    sp<ABuffer> mBackBuffer;  ///< backing buffer if we have to copy C2Buffer <=> ABuffer

    MediaImage2 *getMediaImage() {
        return (MediaImage2 *)mMediaImage->base();
    }
};

} // namespace

// GraphicBlockBuffer
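//
// A GraphicBlockBuffer owns a writable C2GraphicBlock (typically an encoder input frame).
// Allocate() maps the block and either wraps the mapped planes directly in the ABuffer or,
// when wrapping is impossible, attaches a caller-allocated back buffer; asC2Buffer() copies
// the back buffer into the block (if it was not wrapped) and shares the block.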

// static
sp<GraphicBlockBuffer> GraphicBlockBuffer::Allocate(
        const sp<AMessage> &format,
        const std::shared_ptr<C2GraphicBlock> &block,
        std::function<sp<ABuffer>(size_t)> alloc) {
    ATRACE_BEGIN("GraphicBlockBuffer::Allocate block->map()");
    C2GraphicView view(block->map().get());
    ATRACE_END();
    if (view.error() != C2_OK) {
        ALOGD("C2GraphicBlock::map failed: %d", view.error());
        return nullptr;
    }

    GraphicView2MediaImageConverter converter(view, format, false /* copy */);
    if (converter.initCheck() != OK) {
        ALOGD("Converter init failed: %d", converter.initCheck());
        return nullptr;
    }
    bool wrapped = true;
    sp<ABuffer> buffer = converter.wrap();
    if (buffer == nullptr) {
        buffer = alloc(converter.backBufferSize());
        if (!converter.setBackBuffer(buffer)) {
            ALOGD("Converter failed to set back buffer");
            return nullptr;
        }
        wrapped = false;
    }
    return new GraphicBlockBuffer(
            format,
            buffer,
            std::move(view),
            block,
            converter.imageData(),
            wrapped);
}

GraphicBlockBuffer::GraphicBlockBuffer(
        const sp<AMessage> &format,
        const sp<ABuffer> &buffer,
        C2GraphicView &&view,
        const std::shared_ptr<C2GraphicBlock> &block,
        const sp<ABuffer> &imageData,
        bool wrapped)
    : Codec2Buffer(format, buffer),
      mView(view),
      mBlock(block),
      mWrapped(wrapped) {
    setImageData(imageData);
}

std::shared_ptr<C2Buffer> GraphicBlockBuffer::asC2Buffer() {
    ATRACE_CALL();
    uint32_t width = mView.width();
    uint32_t height = mView.height();
    if (!mWrapped) {
        (void)ImageCopy(mView, base(), imageData());
    }
    return C2Buffer::CreateGraphicBuffer(
            mBlock->share(C2Rect(width, height), C2Fence()));
}

// GraphicMetadataBuffer
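//
// A GraphicMetadataBuffer does not carry pixel data; its payload is a VideoNativeMetadata
// struct that points at an ANativeWindowBuffer. asC2Buffer() wraps that native buffer into a
// C2 graphic block. Because the struct embeds raw pointers, its layout differs between 32-bit
// and 64-bit processes, hence the runtime check below.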
GraphicMetadataBuffer::GraphicMetadataBuffer(
        const sp<AMessage> &format,
        const std::shared_ptr<C2Allocator> &alloc)
    : Codec2Buffer(format, new ABuffer(sizeof(VideoNativeMetadata))),
      mAlloc(alloc) {
    ((VideoNativeMetadata *)base())->pBuffer = nullptr;
}

std::shared_ptr<C2Buffer> GraphicMetadataBuffer::asC2Buffer() {
#ifdef __LP64__
    static std::once_flag s_checkOnce;
    static bool s_is64bitOk {true};
    std::call_once(s_checkOnce, [&](){
        const std::string abi32list =
            ::android::base::GetProperty("ro.product.cpu.abilist32", "");
        if (!abi32list.empty()) {
            int32_t inputSurfaceSetting =
                ::android::base::GetIntProperty("debug.stagefright.c2inputsurface", int32_t(0));
            s_is64bitOk = inputSurfaceSetting != 0;
        }
    });

    if (!s_is64bitOk) {
        ALOGE("GraphicMetadataBuffer does not work on a 32+64 system if compiled as a 64-bit "
              "object when debug.stagefright.c2inputsurface is set to 0");
        return nullptr;
    }
#endif

    VideoNativeMetadata *meta = (VideoNativeMetadata *)base();
    ANativeWindowBuffer *buffer = (ANativeWindowBuffer *)meta->pBuffer;
    if (buffer == nullptr) {
        ALOGD("VideoNativeMetadata contains null buffer");
        return nullptr;
    }

    ALOGV("VideoNativeMetadata: %dx%d", buffer->width, buffer->height);
    C2Handle *handle = WrapNativeCodec2GrallocHandle(
            buffer->handle,
            buffer->width,
            buffer->height,
            buffer->format,
            buffer->usage,
            buffer->stride);
    std::shared_ptr<C2GraphicAllocation> alloc;
    c2_status_t err = mAlloc->priorGraphicAllocation(handle, &alloc);
    if (err != C2_OK) {
        ALOGD("Failed to wrap VideoNativeMetadata into C2GraphicAllocation");
        native_handle_close(handle);
        native_handle_delete(handle);
        return nullptr;
    }
    std::shared_ptr<C2GraphicBlock> block = _C2BlockFactory::CreateGraphicBlock(alloc);

    meta->pBuffer = 0;
    // TODO: wrap this in C2Fence so that the component can wait when it
    //       actually starts processing.
    if (meta->nFenceFd >= 0) {
        sp<Fence> fence(new Fence(meta->nFenceFd));
        fence->waitForever(LOG_TAG);
    }
    return C2Buffer::CreateGraphicBuffer(
            block->share(C2Rect(buffer->width, buffer->height), C2Fence()));
}

// ConstGraphicBlockBuffer
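//
// A ConstGraphicBlockBuffer holds a read-only reference to a graphic C2Buffer (typically a
// decoder output frame). Allocate() wraps or copies the mapped view as above, while
// AllocateEmpty() only reserves a client buffer large enough for a worst-case frame so that
// a later copy() can fill it.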

// static
sp<ConstGraphicBlockBuffer> ConstGraphicBlockBuffer::Allocate(
        const sp<AMessage> &format,
        const std::shared_ptr<C2Buffer> &buffer,
        std::function<sp<ABuffer>(size_t)> alloc) {
    if (!buffer
            || buffer->data().type() != C2BufferData::GRAPHIC
            || buffer->data().graphicBlocks().size() != 1u) {
        ALOGD("C2Buffer precond fail");
        return nullptr;
    }
    ATRACE_BEGIN("ConstGraphicBlockBuffer::Allocate block->map()");
    std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
            buffer->data().graphicBlocks()[0].map().get()));
    ATRACE_END();
    std::unique_ptr<const C2GraphicView> holder;

    GraphicView2MediaImageConverter converter(*view, format, false /* copy */);
    if (converter.initCheck() != OK) {
        ALOGD("Converter init failed: %d", converter.initCheck());
        return nullptr;
    }
    bool wrapped = true;
    sp<ABuffer> aBuffer = converter.wrap();
    if (aBuffer == nullptr) {
        aBuffer = alloc(converter.backBufferSize());
        if (!converter.setBackBuffer(aBuffer)) {
            ALOGD("Converter failed to set back buffer");
            return nullptr;
        }
        wrapped = false;
        converter.copyToMediaImage();
        // We don't need the view.
        holder = std::move(view);
    }
    return new ConstGraphicBlockBuffer(
            format,
            aBuffer,
            std::move(view),
            buffer,
            converter.imageData(),
            wrapped);
}

// static
sp<ConstGraphicBlockBuffer> ConstGraphicBlockBuffer::AllocateEmpty(
        const sp<AMessage> &format,
        std::function<sp<ABuffer>(size_t)> alloc) {
    int32_t width, height;
    if (!format->findInt32("width", &width)
            || !format->findInt32("height", &height)) {
        ALOGD("format had no width / height");
        return nullptr;
    }
    int32_t colorFormat = COLOR_FormatYUV420Flexible;
    int32_t bpp = 12;  // 8(Y) + 2(U) + 2(V)
    if (format->findInt32(KEY_COLOR_FORMAT, &colorFormat)) {
        if (colorFormat == COLOR_FormatYUVP010) {
            bpp = 24;  // 16(Y) + 4(U) + 4(V)
        }
    }
    sp<ABuffer> aBuffer(alloc(align(width, 16) * align(height, 16) * bpp / 8));
    return new ConstGraphicBlockBuffer(
            format,
            aBuffer,
            nullptr,
            nullptr,
            nullptr,
            false);
}

ConstGraphicBlockBuffer::ConstGraphicBlockBuffer(
        const sp<AMessage> &format,
        const sp<ABuffer> &aBuffer,
        std::unique_ptr<const C2GraphicView> &&view,
        const std::shared_ptr<C2Buffer> &buffer,
        const sp<ABuffer> &imageData,
        bool wrapped)
    : Codec2Buffer(format, aBuffer),
      mView(std::move(view)),
      mBufferRef(buffer),
      mWrapped(wrapped) {
    setImageData(imageData);
}

std::shared_ptr<C2Buffer> ConstGraphicBlockBuffer::asC2Buffer() {
    return mBufferRef;
}

void ConstGraphicBlockBuffer::clearC2BufferRefs() {
    mView.reset();
    mBufferRef.reset();
}

bool ConstGraphicBlockBuffer::canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
    if (mWrapped || mBufferRef) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: %swrapped ; buffer ref %s",
                mWrapped ? "" : "not ", mBufferRef ? "exists" : "doesn't exist");
        return false;
    }
    if (!buffer) {
        // Nothing to copy, so we can copy by doing nothing.
        return true;
    }
    if (buffer->data().type() != C2BufferData::GRAPHIC) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: buffer precondition unsatisfied");
        return false;
    }
    if (buffer->data().graphicBlocks().size() == 0) {
        return true;
    } else if (buffer->data().graphicBlocks().size() != 1u) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: too many blocks");
        return false;
    }

    ATRACE_BEGIN("ConstGraphicBlockBuffer::canCopy block->map()");
    GraphicView2MediaImageConverter converter(
            buffer->data().graphicBlocks()[0].map().get(),
            // FIXME: format() is not const, but we cannot change it, so do a const cast here
            const_cast<ConstGraphicBlockBuffer *>(this)->format(),
            true /* copy */);
    ATRACE_END();
    if (converter.initCheck() != OK) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: converter init failed: %d", converter.initCheck());
        return false;
    }
    if (converter.backBufferSize() > capacity()) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: insufficient capacity: req %u has %zu",
                converter.backBufferSize(), capacity());
        return false;
    }
    return true;
}

bool ConstGraphicBlockBuffer::copy(const std::shared_ptr<C2Buffer> &buffer) {
    if (!buffer || buffer->data().graphicBlocks().size() == 0) {
        setRange(0, 0);
        return true;
    }

    GraphicView2MediaImageConverter converter(
            buffer->data().graphicBlocks()[0].map().get(), format(), true /* copy */);
    if (converter.initCheck() != OK) {
        ALOGD("ConstGraphicBlockBuffer::copy: converter init failed: %d", converter.initCheck());
        return false;
    }
    sp<ABuffer> aBuffer = new ABuffer(base(), capacity());
    if (!converter.setBackBuffer(aBuffer)) {
        ALOGD("ConstGraphicBlockBuffer::copy: set back buffer failed");
        return false;
    }
    setRange(0, aBuffer->size());  // align size info
    converter.copyToMediaImage();
    setImageData(converter.imageData());
    mBufferRef = buffer;
    return true;
}

// EncryptedLinearBlockBuffer
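//
// An EncryptedLinearBlockBuffer exposes the non-secure IMemory region to the client for input
// data, while keeping the C2LinearBlock that will receive the decrypted payload.
// fillSourceBuffer() describes the encrypted source for the DRM/CAS HALs, and
// copyDecryptedContent() moves the plaintext into the block once decryption completes.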

EncryptedLinearBlockBuffer::EncryptedLinearBlockBuffer(
        const sp<AMessage> &format,
        const std::shared_ptr<C2LinearBlock> &block,
        const sp<IMemory> &memory,
        int32_t heapSeqNum)
    // TODO: Using unsecurePointer() has some associated security pitfalls
    //       (see declaration for details).
    //       Either document why it is safe in this case or address the
    //       issue (e.g. by copying).
    : Codec2Buffer(format, new ABuffer(memory->unsecurePointer(), memory->size())),
      mBlock(block),
      mMemory(memory),
      mHeapSeqNum(heapSeqNum) {
}

std::shared_ptr<C2Buffer> EncryptedLinearBlockBuffer::asC2Buffer() {
    return C2Buffer::CreateLinearBuffer(mBlock->share(offset(), size(), C2Fence()));
}

void EncryptedLinearBlockBuffer::fillSourceBuffer(
        hardware::drm::V1_0::SharedBuffer *source) {
    BufferChannelBase::IMemoryToSharedBuffer(mMemory, mHeapSeqNum, source);
}

void EncryptedLinearBlockBuffer::fillSourceBuffer(
        hardware::cas::native::V1_0::SharedBuffer *source) {
    ssize_t offset;
    size_t size;

    mHidlMemory = hardware::fromHeap(mMemory->getMemory(&offset, &size));
    source->heapBase = *mHidlMemory;
    source->offset = offset;
    source->size = size;
}

bool EncryptedLinearBlockBuffer::copyDecryptedContent(
        const sp<IMemory> &decrypted, size_t length) {
    C2WriteView view = mBlock->map().get();
    if (view.error() != C2_OK) {
        return false;
    }
    if (view.size() < length) {
        return false;
    }
    memcpy(view.data(), decrypted->unsecurePointer(), length);
    return true;
}

bool EncryptedLinearBlockBuffer::copyDecryptedContentFromMemory(size_t length) {
    return copyDecryptedContent(mMemory, length);
}

native_handle_t *EncryptedLinearBlockBuffer::handle() const {
    return const_cast<native_handle_t *>(mBlock->handle());
}

using ::aidl::android::hardware::graphics::common::Cta861_3;
using ::aidl::android::hardware::graphics::common::Smpte2086;

using ::android::gralloc4::MetadataType_Cta861_3;
using ::android::gralloc4::MetadataType_Smpte2086;
using ::android::gralloc4::MetadataType_Smpte2094_40;

using ::android::hardware::Return;
using ::android::hardware::hidl_vec;

using Error4 = ::android::hardware::graphics::mapper::V4_0::Error;
using IMapper4 = ::android::hardware::graphics::mapper::V4_0::IMapper;

namespace {

sp<IMapper4> GetMapper4() {
    static sp<IMapper4> sMapper = IMapper4::getService();
    return sMapper;
}

class Gralloc4Buffer {
public:
    Gralloc4Buffer(const C2Handle *const handle) : mBuffer(nullptr) {
        sp<IMapper4> mapper = GetMapper4();
        if (!mapper) {
            return;
        }
        // Unwrap raw buffer handle from the C2Handle
        native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
        if (!nh) {
            return;
        }
        // Import the raw handle so IMapper can use the buffer. The imported
        // handle must be freed when the client is done with the buffer.
        mapper->importBuffer(
                hardware::hidl_handle(nh),
                [&](const Error4 &error, void *buffer) {
                    if (error == Error4::NONE) {
                        mBuffer = buffer;
                    }
                });

        // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
        //         does not clone the fds. Thus we need to delete the handle
        //         without closing it.
        native_handle_delete(nh);
    }

    ~Gralloc4Buffer() {
        sp<IMapper4> mapper = GetMapper4();
        if (mapper && mBuffer) {
            // Free the imported buffer handle. This does not release the
            // underlying buffer itself.
            mapper->freeBuffer(mBuffer);
        }
    }

    void *get() const { return mBuffer; }
    operator bool() const { return (mBuffer != nullptr); }
private:
    void *mBuffer;
};

} // namespace

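// Reads SMPTE 2086 / CTA 861.3 static metadata and SMPTE 2094-40 dynamic metadata from the
// gralloc4 buffer behind |handle| and translates it into Codec2 HDR info structures. A missing
// gralloc4 mapper is not an error; the function simply leaves the outputs untouched.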
c2_status_t GetHdrMetadataFromGralloc4Handle(
        const C2Handle *const handle,
        std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo) {
    c2_status_t err = C2_OK;
    sp<IMapper4> mapper = GetMapper4();
    Gralloc4Buffer buffer(handle);
    if (!mapper || !buffer) {
        // Gralloc4 not supported; nothing to do
        return err;
    }
    Error4 mapperErr = Error4::NONE;
    if (staticInfo) {
        ALOGV("Grabbing static HDR info from gralloc4 metadata");
        staticInfo->reset(new C2StreamHdrStaticMetadataInfo::input(0u));
        memset(&(*staticInfo)->mastering, 0, sizeof((*staticInfo)->mastering));
        (*staticInfo)->maxCll = 0;
        (*staticInfo)->maxFall = 0;
        IMapper4::get_cb cb = [&mapperErr, staticInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
            mapperErr = err;
            if (err != Error4::NONE) {
                return;
            }

            std::optional<Smpte2086> smpte2086;
            gralloc4::decodeSmpte2086(vec, &smpte2086);
            if (smpte2086) {
                (*staticInfo)->mastering.red.x = smpte2086->primaryRed.x;
                (*staticInfo)->mastering.red.y = smpte2086->primaryRed.y;
                (*staticInfo)->mastering.green.x = smpte2086->primaryGreen.x;
                (*staticInfo)->mastering.green.y = smpte2086->primaryGreen.y;
                (*staticInfo)->mastering.blue.x = smpte2086->primaryBlue.x;
                (*staticInfo)->mastering.blue.y = smpte2086->primaryBlue.y;
                (*staticInfo)->mastering.white.x = smpte2086->whitePoint.x;
                (*staticInfo)->mastering.white.y = smpte2086->whitePoint.y;

                (*staticInfo)->mastering.maxLuminance = smpte2086->maxLuminance;
                (*staticInfo)->mastering.minLuminance = smpte2086->minLuminance;
            } else {
                mapperErr = Error4::BAD_VALUE;
            }
        };
        Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2086, cb);
        if (!ret.isOk()) {
            err = C2_REFUSED;
        } else if (mapperErr != Error4::NONE) {
            err = C2_CORRUPTED;
        }
        cb = [&mapperErr, staticInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
            mapperErr = err;
            if (err != Error4::NONE) {
                return;
            }

            std::optional<Cta861_3> cta861_3;
            gralloc4::decodeCta861_3(vec, &cta861_3);
            if (cta861_3) {
                (*staticInfo)->maxCll = cta861_3->maxContentLightLevel;
                (*staticInfo)->maxFall = cta861_3->maxFrameAverageLightLevel;
            } else {
                mapperErr = Error4::BAD_VALUE;
            }
        };
        ret = mapper->get(buffer.get(), MetadataType_Cta861_3, cb);
        if (!ret.isOk()) {
            err = C2_REFUSED;
        } else if (mapperErr != Error4::NONE) {
            err = C2_CORRUPTED;
        }
    }
    if (dynamicInfo) {
        ALOGV("Grabbing dynamic HDR info from gralloc4 metadata");
        dynamicInfo->reset();
        IMapper4::get_cb cb = [&mapperErr, dynamicInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
            mapperErr = err;
            if (err != Error4::NONE) {
                return;
            }
            if (!dynamicInfo) {
                return;
            }
            *dynamicInfo = C2StreamHdrDynamicMetadataInfo::input::AllocShared(
                    vec.size(), 0u, C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
            memcpy((*dynamicInfo)->m.data, vec.data(), vec.size());
        };
        Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2094_40, cb);
        if (!ret.isOk() || mapperErr != Error4::NONE) {
            dynamicInfo->reset();
        }
    }

    return err;
}

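// Writes the codec-provided static (SMPTE 2086 + CTA 861.3) and dynamic (SMPTE 2094-40) HDR
// metadata onto the gralloc4 buffer behind |handle| so that downstream consumers can read it.
// Values are range-checked before encoding; an absent gralloc4 mapper is silently ignored.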
c2_status_t SetHdrMetadataToGralloc4Handle(
        const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
        const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
        const C2Handle *const handle) {
    c2_status_t err = C2_OK;
    sp<IMapper4> mapper = GetMapper4();
    Gralloc4Buffer buffer(handle);
    if (!mapper || !buffer) {
        // Gralloc4 not supported; nothing to do
        return err;
    }
    if (staticInfo && *staticInfo) {
        ALOGV("Setting static HDR info as gralloc4 metadata");
        std::optional<Smpte2086> smpte2086 = Smpte2086{
            {staticInfo->mastering.red.x, staticInfo->mastering.red.y},
            {staticInfo->mastering.green.x, staticInfo->mastering.green.y},
            {staticInfo->mastering.blue.x, staticInfo->mastering.blue.y},
            {staticInfo->mastering.white.x, staticInfo->mastering.white.y},
            staticInfo->mastering.maxLuminance,
            staticInfo->mastering.minLuminance,
        };
        hidl_vec<uint8_t> vec;
        if (0.0 <= smpte2086->primaryRed.x && smpte2086->primaryRed.x <= 1.0
                && 0.0 <= smpte2086->primaryRed.y && smpte2086->primaryRed.y <= 1.0
                && 0.0 <= smpte2086->primaryGreen.x && smpte2086->primaryGreen.x <= 1.0
                && 0.0 <= smpte2086->primaryGreen.y && smpte2086->primaryGreen.y <= 1.0
                && 0.0 <= smpte2086->primaryBlue.x && smpte2086->primaryBlue.x <= 1.0
                && 0.0 <= smpte2086->primaryBlue.y && smpte2086->primaryBlue.y <= 1.0
                && 0.0 <= smpte2086->whitePoint.x && smpte2086->whitePoint.x <= 1.0
                && 0.0 <= smpte2086->whitePoint.y && smpte2086->whitePoint.y <= 1.0
                && 0.0 <= smpte2086->maxLuminance && 0.0 <= smpte2086->minLuminance
                && gralloc4::encodeSmpte2086(smpte2086, &vec) == OK) {
            Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Smpte2086, vec);
            if (!ret.isOk()) {
                err = C2_REFUSED;
            } else if (ret != Error4::NONE) {
                err = C2_CORRUPTED;
            }
        }
        std::optional<Cta861_3> cta861_3 = Cta861_3{
            staticInfo->maxCll,
            staticInfo->maxFall,
        };
        if (0.0 <= cta861_3->maxContentLightLevel && 0.0 <= cta861_3->maxFrameAverageLightLevel
                && gralloc4::encodeCta861_3(cta861_3, &vec) == OK) {
            Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Cta861_3, vec);
            if (!ret.isOk()) {
                err = C2_REFUSED;
            } else if (ret != Error4::NONE) {
                err = C2_CORRUPTED;
            }
        }
    }
    if (dynamicInfo && *dynamicInfo && dynamicInfo->flexCount() > 0) {
        ALOGV("Setting dynamic HDR info as gralloc4 metadata");
        std::optional<IMapper4::MetadataType> metadataType;
        switch (dynamicInfo->m.type_) {
            case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_10:
                // TODO
                break;
            case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40:
                metadataType = MetadataType_Smpte2094_40;
                break;
        }

        if (metadataType) {
            std::vector<uint8_t> smpte2094_40;
            smpte2094_40.resize(dynamicInfo->flexCount());
            memcpy(smpte2094_40.data(), dynamicInfo->m.data, dynamicInfo->flexCount());

            hidl_vec<uint8_t> vec;
            if (gralloc4::encodeSmpte2094_40({ smpte2094_40 }, &vec) == OK) {
                Return<Error4> ret = mapper->set(buffer.get(), *metadataType, vec);
                if (!ret.isOk()) {
                    err = C2_REFUSED;
                } else if (ret != Error4::NONE) {
                    err = C2_CORRUPTED;
                }
            }
        } else {
            err = C2_BAD_VALUE;
        }
    }

    return err;
}

} // namespace android