/*
 * Copyright 2018, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "Codec2Buffer"
#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Log.h>
#include <utils/Trace.h>

#include <aidl/android/hardware/graphics/common/Cta861_3.h>
#include <aidl/android/hardware/graphics/common/Smpte2086.h>
#include <android-base/properties.h>
#include <android/hardware/cas/native/1.0/types.h>
#include <android/hardware/drm/1.0/types.h>
#include <android/hardware/graphics/common/1.2/types.h>
#include <android/hardware/graphics/mapper/4.0/IMapper.h>
#include <gralloctypes/Gralloc4.h>
#include <hidlmemory/FrameworkUtils.h>
#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/CodecBase.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <mediadrm/ICrypto.h>
#include <nativebase/nativebase.h>
#include <ui/Fence.h>

#include <C2AllocatorGralloc.h>
#include <C2BlockInternal.h>
#include <C2Debug.h>

#include "Codec2Buffer.h"

namespace android {

// Codec2Buffer

bool Codec2Buffer::canCopyLinear(const std::shared_ptr<C2Buffer> &buffer) const {
    if (const_cast<Codec2Buffer *>(this)->base() == nullptr) {
        return false;
    }
    if (!buffer) {
        // Nothing to copy, so we can copy by doing nothing.
        return true;
    }
    if (buffer->data().type() != C2BufferData::LINEAR) {
        return false;
    }
    if (buffer->data().linearBlocks().size() == 0u) {
        // Nothing to copy, so we can copy by doing nothing.
        return true;
    } else if (buffer->data().linearBlocks().size() > 1u) {
        // We don't know how to copy more than one block.
        return false;
    }
    if (buffer->data().linearBlocks()[0].size() > capacity()) {
        // It won't fit.
        return false;
    }
    return true;
}

bool Codec2Buffer::copyLinear(const std::shared_ptr<C2Buffer> &buffer) {
    // We assume that all canCopyLinear() checks passed.
    if (!buffer || buffer->data().linearBlocks().size() == 0u
            || buffer->data().linearBlocks()[0].size() == 0u) {
        setRange(0, 0);
        return true;
    }
    C2ReadView view = buffer->data().linearBlocks()[0].map().get();
    if (view.error() != C2_OK) {
        ALOGD("Error while mapping: %d", view.error());
        return false;
    }
    if (view.capacity() > capacity()) {
        ALOGD("C2ConstLinearBlock lied --- it actually doesn't fit: view(%u) > this(%zu)",
                view.capacity(), capacity());
        return false;
    }
    memcpy(base(), view.data(), view.capacity());
    setRange(0, view.capacity());
    return true;
}
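
// Usage sketch (added for illustration; not part of the original file): how the
// canCopy()/copy() pair built on the helpers above is meant to be called. The
// `source` and `target` names are hypothetical.
//
//     std::shared_ptr<C2Buffer> source = ...;  // at most one linear block, or null
//     sp<LocalLinearBuffer> target = ...;      // client-owned linear buffer
//     if (target->canCopy(source)) {
//         bool ok = target->copy(source);      // on success, range becomes [0, copied size)
//     }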

void Codec2Buffer::setImageData(const sp<ABuffer> &imageData) {
    mImageData = imageData;
}

// LocalLinearBuffer

bool LocalLinearBuffer::canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
    return canCopyLinear(buffer);
}

bool LocalLinearBuffer::copy(const std::shared_ptr<C2Buffer> &buffer) {
    return copyLinear(buffer);
}

// DummyContainerBuffer

static uint8_t sDummyByte[1] = { 0 };

DummyContainerBuffer::DummyContainerBuffer(
        const sp<AMessage> &format, const std::shared_ptr<C2Buffer> &buffer)
    : Codec2Buffer(format, new ABuffer(sDummyByte, 1)),
      mBufferRef(buffer) {
    setRange(0, buffer ? 1 : 0);
}

std::shared_ptr<C2Buffer> DummyContainerBuffer::asC2Buffer() {
    return mBufferRef;
}

void DummyContainerBuffer::clearC2BufferRefs() {
    mBufferRef.reset();
}

bool DummyContainerBuffer::canCopy(const std::shared_ptr<C2Buffer> &) const {
    return !mBufferRef;
}

bool DummyContainerBuffer::copy(const std::shared_ptr<C2Buffer> &buffer) {
    mBufferRef = buffer;
    setRange(0, mBufferRef ? 1 : 0);
    return true;
}

// LinearBlockBuffer

// static
sp<LinearBlockBuffer> LinearBlockBuffer::Allocate(
        const sp<AMessage> &format, const std::shared_ptr<C2LinearBlock> &block) {
    C2WriteView writeView(block->map().get());
    if (writeView.error() != C2_OK) {
        return nullptr;
    }
    return new LinearBlockBuffer(format, std::move(writeView), block);
}

std::shared_ptr<C2Buffer> LinearBlockBuffer::asC2Buffer() {
    return C2Buffer::CreateLinearBuffer(mBlock->share(offset(), size(), C2Fence()));
}

bool LinearBlockBuffer::canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
    return canCopyLinear(buffer);
}

bool LinearBlockBuffer::copy(const std::shared_ptr<C2Buffer> &buffer) {
    return copyLinear(buffer);
}

LinearBlockBuffer::LinearBlockBuffer(
        const sp<AMessage> &format,
        C2WriteView&& writeView,
        const std::shared_ptr<C2LinearBlock> &block)
    : Codec2Buffer(format, new ABuffer(writeView.data(), writeView.size())),
      mWriteView(writeView),
      mBlock(block) {
}
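
// Usage sketch (added for illustration; not part of the original file): typical
// lifecycle of a LinearBlockBuffer. The `pool` object and its fetchLinearBlock()
// call are assumptions about the caller, not defined in this file.
//
//     std::shared_ptr<C2LinearBlock> block;
//     pool->fetchLinearBlock(capacity, usage, &block);
//     sp<LinearBlockBuffer> buffer = LinearBlockBuffer::Allocate(format, block);
//     if (buffer != nullptr) {
//         // the client writes into buffer->data() and calls setRange(...);
//         std::shared_ptr<C2Buffer> c2Buffer = buffer->asC2Buffer();
//         // c2Buffer shares the [offset, offset + size) range of the block
//     }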

// ConstLinearBlockBuffer

// static
sp<ConstLinearBlockBuffer> ConstLinearBlockBuffer::Allocate(
        const sp<AMessage> &format, const std::shared_ptr<C2Buffer> &buffer) {
    if (!buffer
            || buffer->data().type() != C2BufferData::LINEAR
            || buffer->data().linearBlocks().size() != 1u) {
        return nullptr;
    }
    C2ReadView readView(buffer->data().linearBlocks()[0].map().get());
    if (readView.error() != C2_OK) {
        return nullptr;
    }
    return new ConstLinearBlockBuffer(format, std::move(readView), buffer);
}

ConstLinearBlockBuffer::ConstLinearBlockBuffer(
        const sp<AMessage> &format,
        C2ReadView&& readView,
        const std::shared_ptr<C2Buffer> &buffer)
    : Codec2Buffer(format, new ABuffer(
            // NOTE: ABuffer only takes non-const pointer but this data is
            // supposed to be read-only.
            const_cast<uint8_t *>(readView.data()), readView.capacity())),
      mReadView(readView),
      mBufferRef(buffer) {
}

std::shared_ptr<C2Buffer> ConstLinearBlockBuffer::asC2Buffer() {
    return mBufferRef;
}

void ConstLinearBlockBuffer::clearC2BufferRefs() {
    mBufferRef.reset();
}

// GraphicView2MediaImageConverter

namespace {

class GraphicView2MediaImageConverter {
public:
    /**
     * Creates a C2GraphicView <=> MediaImage converter
     *
     * \param view C2GraphicView object
     * \param format buffer format
     * \param copy whether the converter is used for copy or not
     */
    GraphicView2MediaImageConverter(
            const C2GraphicView &view, const sp<AMessage> &format, bool copy)
        : mInitCheck(NO_INIT),
          mView(view),
          mWidth(view.width()),
          mHeight(view.height()),
          mAllocatedDepth(0),
          mBackBufferSize(0),
          mMediaImage(new ABuffer(sizeof(MediaImage2))) {
        ATRACE_CALL();
        if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
            mClientColorFormat = COLOR_FormatYUV420Flexible;
        }
        if (!format->findInt32("android._color-format", &mComponentColorFormat)) {
            mComponentColorFormat = COLOR_FormatYUV420Flexible;
        }
        if (view.error() != C2_OK) {
            ALOGD("Converter: view.error() = %d", view.error());
            mInitCheck = BAD_VALUE;
            return;
        }
        MediaImage2 *mediaImage = (MediaImage2 *)mMediaImage->base();
        const C2PlanarLayout &layout = view.layout();
        if (layout.numPlanes == 0) {
            ALOGD("Converter: 0 planes");
            mInitCheck = BAD_VALUE;
            return;
        }
        memset(mediaImage, 0, sizeof(*mediaImage));
        mAllocatedDepth = layout.planes[0].allocatedDepth;
        uint32_t bitDepth = layout.planes[0].bitDepth;

        // align width and height to support subsampling cleanly
        uint32_t stride = align(view.crop().width, 2) * divUp(layout.planes[0].allocatedDepth, 8u);
        uint32_t vStride = align(view.crop().height, 2);

        bool tryWrapping = !copy;

        switch (layout.type) {
            case C2PlanarLayout::TYPE_YUV: {
                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
                if (layout.numPlanes != 3) {
                    ALOGD("Converter: %d planes for YUV layout", layout.numPlanes);
                    mInitCheck = BAD_VALUE;
                    return;
                }
                std::optional<int> clientBitDepth = {};
                switch (mClientColorFormat) {
                    case COLOR_FormatYUVP010:
                        clientBitDepth = 10;
                        break;
                    case COLOR_FormatYUV411PackedPlanar:
                    case COLOR_FormatYUV411Planar:
                    case COLOR_FormatYUV420Flexible:
                    case COLOR_FormatYUV420PackedPlanar:
                    case COLOR_FormatYUV420PackedSemiPlanar:
                    case COLOR_FormatYUV420Planar:
                    case COLOR_FormatYUV420SemiPlanar:
                    case COLOR_FormatYUV422Flexible:
                    case COLOR_FormatYUV422PackedPlanar:
                    case COLOR_FormatYUV422PackedSemiPlanar:
                    case COLOR_FormatYUV422Planar:
                    case COLOR_FormatYUV422SemiPlanar:
                    case COLOR_FormatYUV444Flexible:
                    case COLOR_FormatYUV444Interleaved:
                        clientBitDepth = 8;
                        break;
                    default:
                        // no-op; used with optional
                        break;
                }
                // conversion fails if the client bit depth and the component bit depth differ
                if ((clientBitDepth) && (bitDepth != clientBitDepth.value())) {
                    ALOGD("Bit depth of client: %d and component: %d differs",
                            *clientBitDepth, bitDepth);
                    mInitCheck = BAD_VALUE;
                    return;
                }
                C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
                C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
                C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
                if (yPlane.channel != C2PlaneInfo::CHANNEL_Y
                        || uPlane.channel != C2PlaneInfo::CHANNEL_CB
                        || vPlane.channel != C2PlaneInfo::CHANNEL_CR) {
                    ALOGD("Converter: not YUV layout");
                    mInitCheck = BAD_VALUE;
                    return;
                }
                bool yuv420888 = yPlane.rowSampling == 1 && yPlane.colSampling == 1
                        && uPlane.rowSampling == 2 && uPlane.colSampling == 2
                        && vPlane.rowSampling == 2 && vPlane.colSampling == 2;
                if (yuv420888) {
                    for (uint32_t i = 0; i < 3; ++i) {
                        const C2PlaneInfo &plane = layout.planes[i];
                        if (plane.allocatedDepth != 8 || plane.bitDepth != 8) {
                            yuv420888 = false;
                            break;
                        }
                    }
                    yuv420888 = yuv420888 && yPlane.colInc == 1 && uPlane.rowInc == vPlane.rowInc;
                }
                int32_t copyFormat = mClientColorFormat;
                if (yuv420888 && mClientColorFormat == COLOR_FormatYUV420Flexible) {
                    if (uPlane.colInc == 2 && vPlane.colInc == 2
                            && yPlane.rowInc == uPlane.rowInc) {
                        copyFormat = COLOR_FormatYUV420PackedSemiPlanar;
                    } else if (uPlane.colInc == 1 && vPlane.colInc == 1
                            && yPlane.rowInc == uPlane.rowInc * 2) {
                        copyFormat = COLOR_FormatYUV420PackedPlanar;
                    }
                }
                ALOGV("client_fmt=0x%x y:{colInc=%d rowInc=%d} u:{colInc=%d rowInc=%d} "
                        "v:{colInc=%d rowInc=%d}",
                        mClientColorFormat,
                        yPlane.colInc, yPlane.rowInc,
                        uPlane.colInc, uPlane.rowInc,
                        vPlane.colInc, vPlane.rowInc);
                switch (copyFormat) {
                    case COLOR_FormatYUV420Flexible:
                    case COLOR_FormatYUV420Planar:
                    case COLOR_FormatYUV420PackedPlanar:
                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                        mediaImage->mPlane[mediaImage->Y].mColInc = 1;
                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;

                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
                        mediaImage->mPlane[mediaImage->U].mColInc = 1;
                        mediaImage->mPlane[mediaImage->U].mRowInc = stride / 2;
                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;

                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 5 / 4;
                        mediaImage->mPlane[mediaImage->V].mColInc = 1;
                        mediaImage->mPlane[mediaImage->V].mRowInc = stride / 2;
                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;

                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
                            tryWrapping = yuv420888 && uPlane.colInc == 1 && vPlane.colInc == 1
                                    && yPlane.rowInc == uPlane.rowInc * 2
                                    && view.data()[0] < view.data()[1]
                                    && view.data()[1] < view.data()[2];
                        }
                        break;

                    case COLOR_FormatYUV420SemiPlanar:
                    case COLOR_FormatYUV420PackedSemiPlanar:
                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                        mediaImage->mPlane[mediaImage->Y].mColInc = 1;
                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;

                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
                        mediaImage->mPlane[mediaImage->U].mColInc = 2;
                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;

                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 1;
                        mediaImage->mPlane[mediaImage->V].mColInc = 2;
                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;

                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
                            tryWrapping = yuv420888 && uPlane.colInc == 2 && vPlane.colInc == 2
                                    && yPlane.rowInc == uPlane.rowInc
                                    && view.data()[0] < view.data()[1]
                                    && view.data()[1] < view.data()[2];
                        }
                        break;

                    case COLOR_FormatYUVP010:
                        // stride is in bytes
                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                        mediaImage->mPlane[mediaImage->Y].mColInc = 2;
                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;

                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
                        mediaImage->mPlane[mediaImage->U].mColInc = 4;
                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;

                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
                        mediaImage->mPlane[mediaImage->V].mColInc = 4;
                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
                        if (tryWrapping) {
                            tryWrapping = yPlane.allocatedDepth == 16
                                    && uPlane.allocatedDepth == 16
                                    && vPlane.allocatedDepth == 16
                                    && yPlane.bitDepth == 10
                                    && uPlane.bitDepth == 10
                                    && vPlane.bitDepth == 10
                                    && yPlane.rightShift == 6
                                    && uPlane.rightShift == 6
                                    && vPlane.rightShift == 6
                                    && yPlane.rowSampling == 1 && yPlane.colSampling == 1
                                    && uPlane.rowSampling == 2 && uPlane.colSampling == 2
                                    && vPlane.rowSampling == 2 && vPlane.colSampling == 2
                                    && yPlane.colInc == 2
                                    && uPlane.colInc == 4
                                    && vPlane.colInc == 4
                                    && yPlane.rowInc == uPlane.rowInc
                                    && yPlane.rowInc == vPlane.rowInc;
                        }
                        break;

                    default: {
                        // default to fully planar format --- this will be overridden if wrapping
                        // TODO: keep interleaved format
                        int32_t colInc = divUp(mAllocatedDepth, 8u);
                        int32_t rowInc = stride * colInc / yPlane.colSampling;
                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
                        mediaImage->mPlane[mediaImage->Y].mColInc = colInc;
                        mediaImage->mPlane[mediaImage->Y].mRowInc = rowInc;
                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = yPlane.colSampling;
                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = yPlane.rowSampling;
                        int32_t offset = rowInc * vStride / yPlane.rowSampling;

                        rowInc = stride * colInc / uPlane.colSampling;
                        mediaImage->mPlane[mediaImage->U].mOffset = offset;
                        mediaImage->mPlane[mediaImage->U].mColInc = colInc;
                        mediaImage->mPlane[mediaImage->U].mRowInc = rowInc;
                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = uPlane.colSampling;
                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = uPlane.rowSampling;
                        offset += rowInc * vStride / uPlane.rowSampling;

                        rowInc = stride * colInc / vPlane.colSampling;
                        mediaImage->mPlane[mediaImage->V].mOffset = offset;
                        mediaImage->mPlane[mediaImage->V].mColInc = colInc;
                        mediaImage->mPlane[mediaImage->V].mRowInc = rowInc;
                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = vPlane.colSampling;
                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = vPlane.rowSampling;
                        break;
                    }
                }
                break;
            }

            case C2PlanarLayout::TYPE_YUVA:
                ALOGD("Converter: unrecognized color format "
                        "(client %d component %d) for YUVA layout",
                        mClientColorFormat, mComponentColorFormat);
                mInitCheck = NO_INIT;
                return;
            case C2PlanarLayout::TYPE_RGB:
                ALOGD("Converter: unrecognized color format "
                        "(client %d component %d) for RGB layout",
                        mClientColorFormat, mComponentColorFormat);
                mInitCheck = NO_INIT;
                // TODO: support MediaImage layout
                return;
            case C2PlanarLayout::TYPE_RGBA:
                ALOGD("Converter: unrecognized color format "
                        "(client %d component %d) for RGBA layout",
                        mClientColorFormat, mComponentColorFormat);
                mInitCheck = NO_INIT;
                // TODO: support MediaImage layout
                return;
            default:
                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
                if (layout.numPlanes == 1) {
                    const C2PlaneInfo &plane = layout.planes[0];
                    if (plane.colInc < 0 || plane.rowInc < 0) {
                        // Copy-only if we have negative colInc/rowInc
                        tryWrapping = false;
                    }
                    mediaImage->mPlane[0].mOffset = 0;
                    mediaImage->mPlane[0].mColInc = std::abs(plane.colInc);
                    mediaImage->mPlane[0].mRowInc = std::abs(plane.rowInc);
                    mediaImage->mPlane[0].mHorizSubsampling = plane.colSampling;
                    mediaImage->mPlane[0].mVertSubsampling = plane.rowSampling;
                } else {
                    ALOGD("Converter: unrecognized layout: color format (client %d component %d)",
                            mClientColorFormat, mComponentColorFormat);
                    mInitCheck = NO_INIT;
                    return;
                }
                break;
        }
        if (tryWrapping) {
            // try to map directly. check if the planes are near one another
            const uint8_t *minPtr = mView.data()[0];
            const uint8_t *maxPtr = mView.data()[0];
            int32_t planeSize = 0;
            for (uint32_t i = 0; i < layout.numPlanes; ++i) {
                const C2PlaneInfo &plane = layout.planes[i];
                int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
                ssize_t minOffset = plane.minOffset(
                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
                ssize_t maxOffset = plane.maxOffset(
                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
                if (minPtr > mView.data()[i] + minOffset) {
                    minPtr = mView.data()[i] + minOffset;
                }
                if (maxPtr < mView.data()[i] + maxOffset) {
                    maxPtr = mView.data()[i] + maxOffset;
                }
                planeSize += planeStride * divUp(mAllocatedDepth, 8u)
                        * align(mHeight, 64) / plane.rowSampling;
            }

            if (minPtr == mView.data()[0] && (maxPtr - minPtr + 1) <= planeSize) {
                // FIXME: this is risky as reading/writing data out of bound results
                //        in an undefined behavior, but gralloc does assume a
                //        contiguous mapping
                for (uint32_t i = 0; i < layout.numPlanes; ++i) {
                    const C2PlaneInfo &plane = layout.planes[i];
                    mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
                    mediaImage->mPlane[i].mColInc = plane.colInc;
                    mediaImage->mPlane[i].mRowInc = plane.rowInc;
                    mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
                    mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
                }
                mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr),
                        maxPtr - minPtr + 1);
                ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
            }
        }
        mediaImage->mNumPlanes = layout.numPlanes;
        mediaImage->mWidth = view.crop().width;
        mediaImage->mHeight = view.crop().height;
        mediaImage->mBitDepth = bitDepth;
        mediaImage->mBitDepthAllocated = mAllocatedDepth;

        uint32_t bufferSize = 0;
        for (uint32_t i = 0; i < layout.numPlanes; ++i) {
            const C2PlaneInfo &plane = layout.planes[i];
            if (plane.allocatedDepth < plane.bitDepth
                    || plane.rightShift != plane.allocatedDepth - plane.bitDepth) {
                ALOGD("rightShift value of %u unsupported", plane.rightShift);
                mInitCheck = BAD_VALUE;
                return;
            }
            if (plane.allocatedDepth > 8 && plane.endianness != C2PlaneInfo::NATIVE) {
                ALOGD("endianness value of %u unsupported", plane.endianness);
                mInitCheck = BAD_VALUE;
                return;
            }
            if (plane.allocatedDepth != mAllocatedDepth || plane.bitDepth != bitDepth) {
                ALOGD("different allocatedDepth/bitDepth per plane unsupported");
                mInitCheck = BAD_VALUE;
                return;
            }
            // stride is in bytes
            bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
        }

        mBackBufferSize = bufferSize;
        mInitCheck = OK;
    }
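
    // Worked example (added for clarity; not part of the original file): for a
    // COLOR_FormatYUV420Planar view with an even 320x240 crop and 8-bit planes,
    // stride = 320 and vStride = 240, so the MediaImage2 filled in above is
    //     Y: offset 0,     rowInc 320, colInc 1
    //     U: offset 76800, rowInc 160, colInc 1   (stride * vStride)
    //     V: offset 96000, rowInc 160, colInc 1   (stride * vStride * 5 / 4)
    // and the back-buffer size computed in the constructor is
    //     320 * 240 + 2 * (320 * 240 / 4) = 115200 bytes (i.e. width * height * 3 / 2).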

    status_t initCheck() const { return mInitCheck; }

    uint32_t backBufferSize() const { return mBackBufferSize; }

    /**
     * Wrap C2GraphicView using a MediaImage2. Note that if not wrapped, the content is not mapped
     * in this function --- the caller should use copyToMediaImage() to copy the
     * data into a backing buffer explicitly.
     *
     * \return media buffer. This is null if wrapping failed.
     */
    sp<ABuffer> wrap() const {
        if (mBackBuffer == nullptr) {
            return mWrapped;
        }
        return nullptr;
    }

    bool setBackBuffer(const sp<ABuffer> &backBuffer) {
        if (backBuffer == nullptr) {
            return false;
        }
        if (backBuffer->capacity() < mBackBufferSize) {
            return false;
        }
        backBuffer->setRange(0, mBackBufferSize);
        mBackBuffer = backBuffer;
        return true;
    }

    /**
     * Copy C2GraphicView to MediaImage2.
     */
    status_t copyToMediaImage() {
        ATRACE_CALL();
        if (mInitCheck != OK) {
            return mInitCheck;
        }
        return ImageCopy(mBackBuffer->base(), getMediaImage(), mView);
    }
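
    // Usage sketch (added for illustration; not part of the original file):
    // callers first try wrap(), and only fall back to an explicit copy into a
    // back buffer when wrapping is not possible. The `alloc` callable is a
    // hypothetical allocator provided by the caller.
    //
    //     sp<ABuffer> out = converter.wrap();
    //     if (out == nullptr) {
    //         out = alloc(converter.backBufferSize());
    //         if (converter.setBackBuffer(out)) {
    //             converter.copyToMediaImage();  // fills the back buffer
    //         }
    //     }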

    const sp<ABuffer> &imageData() const { return mMediaImage; }

private:
    status_t mInitCheck;

    const C2GraphicView mView;
    uint32_t mWidth;
    uint32_t mHeight;
    int32_t mClientColorFormat; ///< SDK color format for MediaImage
    int32_t mComponentColorFormat; ///< SDK color format from component
    sp<ABuffer> mWrapped; ///< wrapped buffer (if we can map C2Buffer to an ABuffer)
    uint32_t mAllocatedDepth;
    uint32_t mBackBufferSize;
    sp<ABuffer> mMediaImage;
    std::function<sp<ABuffer>(size_t)> mAlloc;

    sp<ABuffer> mBackBuffer; ///< backing buffer if we have to copy C2Buffer <=> ABuffer

    MediaImage2 *getMediaImage() {
        return (MediaImage2 *)mMediaImage->base();
    }
};

} // namespace

// GraphicBlockBuffer

// static
sp<GraphicBlockBuffer> GraphicBlockBuffer::Allocate(
        const sp<AMessage> &format,
        const std::shared_ptr<C2GraphicBlock> &block,
        std::function<sp<ABuffer>(size_t)> alloc) {
    ATRACE_BEGIN("GraphicBlockBuffer::Allocate block->map()");
    C2GraphicView view(block->map().get());
    ATRACE_END();
    if (view.error() != C2_OK) {
        ALOGD("C2GraphicBlock::map failed: %d", view.error());
        return nullptr;
    }

    GraphicView2MediaImageConverter converter(view, format, false /* copy */);
    if (converter.initCheck() != OK) {
        ALOGD("Converter init failed: %d", converter.initCheck());
        return nullptr;
    }
    bool wrapped = true;
    sp<ABuffer> buffer = converter.wrap();
    if (buffer == nullptr) {
        buffer = alloc(converter.backBufferSize());
        if (!converter.setBackBuffer(buffer)) {
            ALOGD("Converter failed to set back buffer");
            return nullptr;
        }
        wrapped = false;
    }
    return new GraphicBlockBuffer(
            format,
            buffer,
            std::move(view),
            block,
            converter.imageData(),
            wrapped);
}

GraphicBlockBuffer::GraphicBlockBuffer(
        const sp<AMessage> &format,
        const sp<ABuffer> &buffer,
        C2GraphicView &&view,
        const std::shared_ptr<C2GraphicBlock> &block,
        const sp<ABuffer> &imageData,
        bool wrapped)
    : Codec2Buffer(format, buffer),
      mView(view),
      mBlock(block),
      mWrapped(wrapped) {
    setImageData(imageData);
}

std::shared_ptr<C2Buffer> GraphicBlockBuffer::asC2Buffer() {
    ATRACE_CALL();
    uint32_t width = mView.width();
    uint32_t height = mView.height();
    if (!mWrapped) {
        (void)ImageCopy(mView, base(), imageData());
    }
    return C2Buffer::CreateGraphicBuffer(
            mBlock->share(C2Rect(width, height), C2Fence()));
}

// GraphicMetadataBuffer
GraphicMetadataBuffer::GraphicMetadataBuffer(
        const sp<AMessage> &format,
        const std::shared_ptr<C2Allocator> &alloc)
    : Codec2Buffer(format, new ABuffer(sizeof(VideoNativeMetadata))),
      mAlloc(alloc) {
    ((VideoNativeMetadata *)base())->pBuffer = nullptr;
}

std::shared_ptr<C2Buffer> GraphicMetadataBuffer::asC2Buffer() {
#ifdef __LP64__
    static std::once_flag s_checkOnce;
    static bool s_is64bitOk {true};
    std::call_once(s_checkOnce, [&](){
        const std::string abi32list =
                ::android::base::GetProperty("ro.product.cpu.abilist32", "");
        if (!abi32list.empty()) {
            int32_t inputSurfaceSetting =
                    ::android::base::GetIntProperty("debug.stagefright.c2inputsurface", int32_t(0));
            s_is64bitOk = inputSurfaceSetting != 0;
        }
    });

    if (!s_is64bitOk) {
        ALOGE("GraphicMetadataBuffer does not work in 32+64 system if compiled as 64-bit object "
                "when debug.stagefright.c2inputsurface is set to 0");
        return nullptr;
    }
#endif

    VideoNativeMetadata *meta = (VideoNativeMetadata *)base();
    ANativeWindowBuffer *buffer = (ANativeWindowBuffer *)meta->pBuffer;
    if (buffer == nullptr) {
        ALOGD("VideoNativeMetadata contains null buffer");
        return nullptr;
    }

    ALOGV("VideoNativeMetadata: %dx%d", buffer->width, buffer->height);
    C2Handle *handle = WrapNativeCodec2GrallocHandle(
            buffer->handle,
            buffer->width,
            buffer->height,
            buffer->format,
            buffer->usage,
            buffer->stride);
    std::shared_ptr<C2GraphicAllocation> alloc;
    c2_status_t err = mAlloc->priorGraphicAllocation(handle, &alloc);
    if (err != C2_OK) {
        ALOGD("Failed to wrap VideoNativeMetadata into C2GraphicAllocation");
        native_handle_close(handle);
        native_handle_delete(handle);
        return nullptr;
    }
    std::shared_ptr<C2GraphicBlock> block = _C2BlockFactory::CreateGraphicBlock(alloc);

    meta->pBuffer = 0;
    // TODO: wrap this in C2Fence so that the component can wait when it
    //       actually starts processing.
    if (meta->nFenceFd >= 0) {
        sp<Fence> fence(new Fence(meta->nFenceFd));
        fence->waitForever(LOG_TAG);
    }
    return C2Buffer::CreateGraphicBuffer(
            block->share(C2Rect(buffer->width, buffer->height), C2Fence()));
}

// ConstGraphicBlockBuffer

// static
sp<ConstGraphicBlockBuffer> ConstGraphicBlockBuffer::Allocate(
        const sp<AMessage> &format,
        const std::shared_ptr<C2Buffer> &buffer,
        std::function<sp<ABuffer>(size_t)> alloc) {
    if (!buffer
            || buffer->data().type() != C2BufferData::GRAPHIC
            || buffer->data().graphicBlocks().size() != 1u) {
        ALOGD("C2Buffer precond fail");
        return nullptr;
    }
    ATRACE_BEGIN("ConstGraphicBlockBuffer::Allocate block->map()");
    std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
            buffer->data().graphicBlocks()[0].map().get()));
    ATRACE_END();
    std::unique_ptr<const C2GraphicView> holder;

    GraphicView2MediaImageConverter converter(*view, format, false /* copy */);
    if (converter.initCheck() != OK) {
        ALOGD("Converter init failed: %d", converter.initCheck());
        return nullptr;
    }
    bool wrapped = true;
    sp<ABuffer> aBuffer = converter.wrap();
    if (aBuffer == nullptr) {
        aBuffer = alloc(converter.backBufferSize());
        if (!converter.setBackBuffer(aBuffer)) {
            ALOGD("Converter failed to set back buffer");
            return nullptr;
        }
        wrapped = false;
        converter.copyToMediaImage();
        // We don't need the view.
        holder = std::move(view);
    }
    return new ConstGraphicBlockBuffer(
            format,
            aBuffer,
            std::move(view),
            buffer,
            converter.imageData(),
            wrapped);
}

// static
sp<ConstGraphicBlockBuffer> ConstGraphicBlockBuffer::AllocateEmpty(
        const sp<AMessage> &format,
        std::function<sp<ABuffer>(size_t)> alloc) {
    int32_t width, height;
    if (!format->findInt32("width", &width)
            || !format->findInt32("height", &height)) {
        ALOGD("format had no width / height");
        return nullptr;
    }
    int32_t colorFormat = COLOR_FormatYUV420Flexible;
    int32_t bpp = 12;  // 8(Y) + 2(U) + 2(V)
    if (format->findInt32(KEY_COLOR_FORMAT, &colorFormat)) {
        if (colorFormat == COLOR_FormatYUVP010) {
            bpp = 24;  // 16(Y) + 4(U) + 4(V)
        }
    }
    sp<ABuffer> aBuffer(alloc(align(width, 16) * align(height, 16) * bpp / 8));
    return new ConstGraphicBlockBuffer(
            format,
            aBuffer,
            nullptr,
            nullptr,
            nullptr,
            false);
}

ConstGraphicBlockBuffer::ConstGraphicBlockBuffer(
        const sp<AMessage> &format,
        const sp<ABuffer> &aBuffer,
        std::unique_ptr<const C2GraphicView> &&view,
        const std::shared_ptr<C2Buffer> &buffer,
        const sp<ABuffer> &imageData,
        bool wrapped)
    : Codec2Buffer(format, aBuffer),
      mView(std::move(view)),
      mBufferRef(buffer),
      mWrapped(wrapped) {
    setImageData(imageData);
}

std::shared_ptr<C2Buffer> ConstGraphicBlockBuffer::asC2Buffer() {
    return mBufferRef;
}

void ConstGraphicBlockBuffer::clearC2BufferRefs() {
    mView.reset();
    mBufferRef.reset();
}

bool ConstGraphicBlockBuffer::canCopy(const std::shared_ptr<C2Buffer> &buffer) const {
    if (mWrapped || mBufferRef) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: %swrapped ; buffer ref %s",
                mWrapped ? "" : "not ", mBufferRef ? "exists" : "doesn't exist");
        return false;
    }
    if (!buffer) {
        // Nothing to copy, so we can copy by doing nothing.
        return true;
    }
    if (buffer->data().type() != C2BufferData::GRAPHIC) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: buffer precondition unsatisfied");
        return false;
    }
    if (buffer->data().graphicBlocks().size() == 0) {
        return true;
    } else if (buffer->data().graphicBlocks().size() != 1u) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: too many blocks");
        return false;
    }

    ATRACE_BEGIN("ConstGraphicBlockBuffer::canCopy block->map()");
    GraphicView2MediaImageConverter converter(
            buffer->data().graphicBlocks()[0].map().get(),
            // FIXME: format() is not const, but we cannot change it, so do a const cast here
            const_cast<ConstGraphicBlockBuffer *>(this)->format(),
            true /* copy */);
    ATRACE_END();
    if (converter.initCheck() != OK) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: converter init failed: %d", converter.initCheck());
        return false;
    }
    if (converter.backBufferSize() > capacity()) {
        ALOGD("ConstGraphicBlockBuffer::canCopy: insufficient capacity: req %u has %zu",
                converter.backBufferSize(), capacity());
        return false;
    }
    return true;
}

bool ConstGraphicBlockBuffer::copy(const std::shared_ptr<C2Buffer> &buffer) {
    if (!buffer || buffer->data().graphicBlocks().size() == 0) {
        setRange(0, 0);
        return true;
    }

    GraphicView2MediaImageConverter converter(
            buffer->data().graphicBlocks()[0].map().get(), format(), true /* copy */);
    if (converter.initCheck() != OK) {
        ALOGD("ConstGraphicBlockBuffer::copy: converter init failed: %d", converter.initCheck());
        return false;
    }
    sp<ABuffer> aBuffer = new ABuffer(base(), capacity());
    if (!converter.setBackBuffer(aBuffer)) {
        ALOGD("ConstGraphicBlockBuffer::copy: set back buffer failed");
        return false;
    }
    setRange(0, aBuffer->size());  // align size info
    converter.copyToMediaImage();
    setImageData(converter.imageData());
    mBufferRef = buffer;
    return true;
}

// EncryptedLinearBlockBuffer

EncryptedLinearBlockBuffer::EncryptedLinearBlockBuffer(
        const sp<AMessage> &format,
        const std::shared_ptr<C2LinearBlock> &block,
        const sp<IMemory> &memory,
        int32_t heapSeqNum)
    // TODO: Using unsecurePointer() has some associated security pitfalls
    //       (see declaration for details).
    //       Either document why it is safe in this case or address the
    //       issue (e.g. by copying).
    : Codec2Buffer(format, new ABuffer(memory->unsecurePointer(), memory->size())),
      mBlock(block),
      mMemory(memory),
      mHeapSeqNum(heapSeqNum) {
}

std::shared_ptr<C2Buffer> EncryptedLinearBlockBuffer::asC2Buffer() {
    return C2Buffer::CreateLinearBuffer(mBlock->share(offset(), size(), C2Fence()));
}

void EncryptedLinearBlockBuffer::fillSourceBuffer(
        hardware::drm::V1_0::SharedBuffer *source) {
    BufferChannelBase::IMemoryToSharedBuffer(mMemory, mHeapSeqNum, source);
}

void EncryptedLinearBlockBuffer::fillSourceBuffer(
        hardware::cas::native::V1_0::SharedBuffer *source) {
    ssize_t offset;
    size_t size;

    mHidlMemory = hardware::fromHeap(mMemory->getMemory(&offset, &size));
    source->heapBase = *mHidlMemory;
    source->offset = offset;
    source->size = size;
}

bool EncryptedLinearBlockBuffer::copyDecryptedContent(
        const sp<IMemory> &decrypted, size_t length) {
    C2WriteView view = mBlock->map().get();
    if (view.error() != C2_OK) {
        return false;
    }
    if (view.size() < length) {
        return false;
    }
    memcpy(view.data(), decrypted->unsecurePointer(), length);
    return true;
}

bool EncryptedLinearBlockBuffer::copyDecryptedContentFromMemory(size_t length) {
    return copyDecryptedContent(mMemory, length);
}

native_handle_t *EncryptedLinearBlockBuffer::handle() const {
    return const_cast<native_handle_t *>(mBlock->handle());
}
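
// Usage sketch (added for illustration; not part of the original file): the
// expected decrypt flow around EncryptedLinearBlockBuffer. The `crypto` object
// and its decrypt() call are hypothetical stand-ins for the DRM plugin path.
//
//     hardware::drm::V1_0::SharedBuffer src;
//     encryptedBuffer->fillSourceBuffer(&src);
//     // crypto->decrypt(..., src, ..., destination, ...);
//     encryptedBuffer->copyDecryptedContentFromMemory(decryptedLength);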

using ::aidl::android::hardware::graphics::common::Cta861_3;
using ::aidl::android::hardware::graphics::common::Smpte2086;

using ::android::gralloc4::MetadataType_Cta861_3;
using ::android::gralloc4::MetadataType_Smpte2086;
using ::android::gralloc4::MetadataType_Smpte2094_40;

using ::android::hardware::Return;
using ::android::hardware::hidl_vec;

using Error4 = ::android::hardware::graphics::mapper::V4_0::Error;
using IMapper4 = ::android::hardware::graphics::mapper::V4_0::IMapper;

namespace {

sp<IMapper4> GetMapper4() {
    static sp<IMapper4> sMapper = IMapper4::getService();
    return sMapper;
}

class Gralloc4Buffer {
public:
    Gralloc4Buffer(const C2Handle *const handle) : mBuffer(nullptr) {
        sp<IMapper4> mapper = GetMapper4();
        if (!mapper) {
            return;
        }
        // Unwrap raw buffer handle from the C2Handle
        native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
        if (!nh) {
            return;
        }
        // Import the raw handle so IMapper can use the buffer. The imported
        // handle must be freed when the client is done with the buffer.
        mapper->importBuffer(
                hardware::hidl_handle(nh),
                [&](const Error4 &error, void *buffer) {
                    if (error == Error4::NONE) {
                        mBuffer = buffer;
                    }
                });

        // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
        //         does not clone the fds. Thus we need to delete the handle
        //         without closing it.
        native_handle_delete(nh);
    }

    ~Gralloc4Buffer() {
        sp<IMapper4> mapper = GetMapper4();
        if (mapper && mBuffer) {
            // Free the imported buffer handle. This does not release the
            // underlying buffer itself.
            mapper->freeBuffer(mBuffer);
        }
    }

    void *get() const { return mBuffer; }
    operator bool() const { return (mBuffer != nullptr); }
private:
    void *mBuffer;
};
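
// Usage sketch (added for illustration; not part of the original file):
// Gralloc4Buffer is an RAII import of a C2Handle for gralloc4 IMapper access,
// as used by the HDR metadata helpers below.
//
//     Gralloc4Buffer buf(c2Handle);   // imports the handle, or stays null
//     if (buf) {
//         GetMapper4()->get(buf.get(), MetadataType_Smpte2086, cb);
//     }                               // imported handle freed on destruction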

} // namespace

c2_status_t GetHdrMetadataFromGralloc4Handle(
        const C2Handle *const handle,
        std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
        std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo) {
    c2_status_t err = C2_OK;
    sp<IMapper4> mapper = GetMapper4();
    Gralloc4Buffer buffer(handle);
    if (!mapper || !buffer) {
        // Gralloc4 not supported; nothing to do
        return err;
    }
    Error4 mapperErr = Error4::NONE;
    if (staticInfo) {
        ALOGV("Grabbing static HDR info from gralloc4 metadata");
        staticInfo->reset(new C2StreamHdrStaticMetadataInfo::input(0u));
        memset(&(*staticInfo)->mastering, 0, sizeof((*staticInfo)->mastering));
        (*staticInfo)->maxCll = 0;
        (*staticInfo)->maxFall = 0;
        IMapper4::get_cb cb = [&mapperErr, staticInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
            mapperErr = err;
            if (err != Error4::NONE) {
                return;
            }

            std::optional<Smpte2086> smpte2086;
            gralloc4::decodeSmpte2086(vec, &smpte2086);
            if (smpte2086) {
                (*staticInfo)->mastering.red.x = smpte2086->primaryRed.x;
                (*staticInfo)->mastering.red.y = smpte2086->primaryRed.y;
                (*staticInfo)->mastering.green.x = smpte2086->primaryGreen.x;
                (*staticInfo)->mastering.green.y = smpte2086->primaryGreen.y;
                (*staticInfo)->mastering.blue.x = smpte2086->primaryBlue.x;
                (*staticInfo)->mastering.blue.y = smpte2086->primaryBlue.y;
                (*staticInfo)->mastering.white.x = smpte2086->whitePoint.x;
                (*staticInfo)->mastering.white.y = smpte2086->whitePoint.y;

                (*staticInfo)->mastering.maxLuminance = smpte2086->maxLuminance;
                (*staticInfo)->mastering.minLuminance = smpte2086->minLuminance;
            } else {
                mapperErr = Error4::BAD_VALUE;
            }
        };
        Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2086, cb);
        if (!ret.isOk()) {
            err = C2_REFUSED;
        } else if (mapperErr != Error4::NONE) {
            err = C2_CORRUPTED;
        }
        cb = [&mapperErr, staticInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
            mapperErr = err;
            if (err != Error4::NONE) {
                return;
            }

            std::optional<Cta861_3> cta861_3;
            gralloc4::decodeCta861_3(vec, &cta861_3);
            if (cta861_3) {
                (*staticInfo)->maxCll = cta861_3->maxContentLightLevel;
                (*staticInfo)->maxFall = cta861_3->maxFrameAverageLightLevel;
            } else {
                mapperErr = Error4::BAD_VALUE;
            }
        };
        ret = mapper->get(buffer.get(), MetadataType_Cta861_3, cb);
        if (!ret.isOk()) {
            err = C2_REFUSED;
        } else if (mapperErr != Error4::NONE) {
            err = C2_CORRUPTED;
        }
    }
    if (dynamicInfo) {
        ALOGV("Grabbing dynamic HDR info from gralloc4 metadata");
        dynamicInfo->reset();
        IMapper4::get_cb cb = [&mapperErr, dynamicInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
            mapperErr = err;
            if (err != Error4::NONE) {
                return;
            }
            if (!dynamicInfo) {
                return;
            }
            *dynamicInfo = C2StreamHdrDynamicMetadataInfo::input::AllocShared(
                    vec.size(), 0u, C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
            memcpy((*dynamicInfo)->m.data, vec.data(), vec.size());
        };
        Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2094_40, cb);
        if (!ret.isOk() || mapperErr != Error4::NONE) {
            dynamicInfo->reset();
        }
    }

    return err;
}

c2_status_t SetHdrMetadataToGralloc4Handle(
        const std::shared_ptr<const C2StreamHdrStaticMetadataInfo::output> &staticInfo,
        const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
        const C2Handle *const handle) {
    c2_status_t err = C2_OK;
    sp<IMapper4> mapper = GetMapper4();
    Gralloc4Buffer buffer(handle);
    if (!mapper || !buffer) {
        // Gralloc4 not supported; nothing to do
        return err;
    }
    if (staticInfo && *staticInfo) {
        ALOGV("Setting static HDR info as gralloc4 metadata");
        std::optional<Smpte2086> smpte2086 = Smpte2086{
            {staticInfo->mastering.red.x, staticInfo->mastering.red.y},
            {staticInfo->mastering.green.x, staticInfo->mastering.green.y},
            {staticInfo->mastering.blue.x, staticInfo->mastering.blue.y},
            {staticInfo->mastering.white.x, staticInfo->mastering.white.y},
            staticInfo->mastering.maxLuminance,
            staticInfo->mastering.minLuminance,
        };
        hidl_vec<uint8_t> vec;
        if (0.0 <= smpte2086->primaryRed.x && smpte2086->primaryRed.x <= 1.0
                && 0.0 <= smpte2086->primaryRed.y && smpte2086->primaryRed.y <= 1.0
                && 0.0 <= smpte2086->primaryGreen.x && smpte2086->primaryGreen.x <= 1.0
                && 0.0 <= smpte2086->primaryGreen.y && smpte2086->primaryGreen.y <= 1.0
                && 0.0 <= smpte2086->primaryBlue.x && smpte2086->primaryBlue.x <= 1.0
                && 0.0 <= smpte2086->primaryBlue.y && smpte2086->primaryBlue.y <= 1.0
                && 0.0 <= smpte2086->whitePoint.x && smpte2086->whitePoint.x <= 1.0
                && 0.0 <= smpte2086->whitePoint.y && smpte2086->whitePoint.y <= 1.0
                && 0.0 <= smpte2086->maxLuminance && 0.0 <= smpte2086->minLuminance
                && gralloc4::encodeSmpte2086(smpte2086, &vec) == OK) {
            Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Smpte2086, vec);
            if (!ret.isOk()) {
                err = C2_REFUSED;
            } else if (ret != Error4::NONE) {
                err = C2_CORRUPTED;
            }
        }
        std::optional<Cta861_3> cta861_3 = Cta861_3{
            staticInfo->maxCll,
            staticInfo->maxFall,
        };
        if (0.0 <= cta861_3->maxContentLightLevel && 0.0 <= cta861_3->maxFrameAverageLightLevel
                && gralloc4::encodeCta861_3(cta861_3, &vec) == OK) {
            Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Cta861_3, vec);
            if (!ret.isOk()) {
                err = C2_REFUSED;
            } else if (ret != Error4::NONE) {
                err = C2_CORRUPTED;
            }
        }
    }
    if (dynamicInfo && *dynamicInfo && dynamicInfo->flexCount() > 0) {
        ALOGV("Setting dynamic HDR info as gralloc4 metadata");
        std::optional<IMapper4::MetadataType> metadataType;
        switch (dynamicInfo->m.type_) {
        case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_10:
            // TODO
            break;
        case C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40:
            metadataType = MetadataType_Smpte2094_40;
            break;
        }

        if (metadataType) {
            std::vector<uint8_t> smpte2094_40;
            smpte2094_40.resize(dynamicInfo->flexCount());
            memcpy(smpte2094_40.data(), dynamicInfo->m.data, dynamicInfo->flexCount());

            hidl_vec<uint8_t> vec;
            if (gralloc4::encodeSmpte2094_40({ smpte2094_40 }, &vec) == OK) {
                Return<Error4> ret = mapper->set(buffer.get(), *metadataType, vec);
                if (!ret.isOk()) {
                    err = C2_REFUSED;
                } else if (ret != Error4::NONE) {
                    err = C2_CORRUPTED;
                }
            }
        } else {
            err = C2_BAD_VALUE;
        }
    }

    return err;
}
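
// Usage sketch (added for illustration; not part of the original file): how the
// two helpers above are typically paired around a graphic block's handle. The
// `block`, `inStatic`, `inDynamic`, `outStatic` and `outDynamic` names are
// hypothetical.
//
//     std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> inStatic;
//     std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> inDynamic;
//     (void)GetHdrMetadataFromGralloc4Handle(block->handle(), &inStatic, &inDynamic);
//
//     // ... and on the producer side, once the codec has HDR metadata to publish:
//     (void)SetHdrMetadataToGralloc4Handle(outStatic, outDynamic, block->handle());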

} // namespace android