/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#define LOG_TAG "C2SoftDav1dDec"
#include <android-base/properties.h>
#include <cutils/properties.h>
#include <thread>

#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <Codec2CommonUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <log/log.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include "C2SoftDav1dDec.h"

namespace android {

// The number of threads used for the dav1d decoder.
static const int NUM_THREADS_DAV1D_DEFAULT = 0;
static const char NUM_THREADS_DAV1D_PROPERTY[] = "debug.dav1d.numthreads";

// The codec name is set and passed in as a compile flag from Android.bp.
constexpr char COMPONENT_NAME[] = CODECNAME;

constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;

class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
  public:
    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
        noPrivateBuffers();
        noInputReferences();
        noOutputReferences();
        noInputLatency();
        noTimeStretch();

        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                             .withConstValue(new C2ComponentAttributesSetting(
                                     C2Component::ATTRIB_IS_TEMPORAL))
                             .build());

        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
                             .withFields({
                                     C2F(mSize, width).inRange(2, 4096),
                                     C2F(mSize, height).inRange(2, 4096),
                             })
                             .withSetter(SizeSetter)
                             .build());

        addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                             .withDefault(new C2StreamProfileLevelInfo::input(
                                     0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
                             .withFields({C2F(mProfileLevel, profile)
                                                  .oneOf({C2Config::PROFILE_AV1_0,
                                                          C2Config::PROFILE_AV1_1}),
                                          C2F(mProfileLevel, level)
                                                  .oneOf({
                                                          C2Config::LEVEL_AV1_2,
                                                          C2Config::LEVEL_AV1_2_1,
                                                          C2Config::LEVEL_AV1_2_2,
                                                          C2Config::LEVEL_AV1_2_3,
                                                          C2Config::LEVEL_AV1_3,
                                                          C2Config::LEVEL_AV1_3_1,
                                                          C2Config::LEVEL_AV1_3_2,
                                                          C2Config::LEVEL_AV1_3_3,
                                                          C2Config::LEVEL_AV1_4,
                                                          C2Config::LEVEL_AV1_4_1,
                                                          C2Config::LEVEL_AV1_4_2,
                                                          C2Config::LEVEL_AV1_4_3,
                                                          C2Config::LEVEL_AV1_5,
                                                          C2Config::LEVEL_AV1_5_1,
                                                          C2Config::LEVEL_AV1_5_2,
                                                          C2Config::LEVEL_AV1_5_3,
                                                  })})
                             .withSetter(ProfileLevelSetter, mSize)
                             .build());

        mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
        addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
                             .withDefault(mHdr10PlusInfoInput)
                             .withFields({
                                     C2F(mHdr10PlusInfoInput, m.value).any(),
                             })
                             .withSetter(Hdr10PlusInfoInputSetter)
                             .build());

        mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
        addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
                             .withDefault(mHdr10PlusInfoOutput)
                             .withFields({
                                     C2F(mHdr10PlusInfoOutput, m.value).any(),
                             })
                             .withSetter(Hdr10PlusInfoOutputSetter)
                             .build());

        // default static info
        C2HdrStaticMetadataStruct defaultStaticInfo{};
        helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
        addParameter(
                DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
                        .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
                        .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
                                     C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
                                     C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
                                     C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
                                     C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
                        .withSetter(HdrStaticInfoSetter)
                        .build());

        addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
                             .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
                             .withFields({
                                     C2F(mSize, width).inRange(2, 2048, 2),
                                     C2F(mSize, height).inRange(2, 2048, 2),
                             })
                             .withSetter(MaxPictureSizeSetter, mSize)
                             .build());

        addParameter(
                DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
                        .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
                        .withFields({
                                C2F(mMaxInputSize, value).any(),
                        })
                        .calculatedAs(MaxInputSizeSetter, mMaxSize)
                        .build());

        C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
        std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
                C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
        memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));

        defaultColorInfo = C2StreamColorInfo::output::AllocShared(
                {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
        helper->addStructDescriptors<C2ChromaOffsetStruct>();

        addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
                             .withConstValue(defaultColorInfo)
                             .build());

        addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
                             .withDefault(new C2StreamColorAspectsTuning::output(
                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                             .withFields({C2F(mDefaultColorAspects, range)
                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
                                                           C2Color::RANGE_OTHER),
                                          C2F(mDefaultColorAspects, primaries)
                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                                           C2Color::PRIMARIES_OTHER),
                                          C2F(mDefaultColorAspects, transfer)
                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                                           C2Color::TRANSFER_OTHER),
                                          C2F(mDefaultColorAspects, matrix)
                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
                                                           C2Color::MATRIX_OTHER)})
                             .withSetter(DefaultColorAspectsSetter)
                             .build());

        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
                             .withDefault(new C2StreamColorAspectsInfo::input(
                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                             .withFields({C2F(mCodedColorAspects, range)
                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
                                                           C2Color::RANGE_OTHER),
                                          C2F(mCodedColorAspects, primaries)
                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                                           C2Color::PRIMARIES_OTHER),
                                          C2F(mCodedColorAspects, transfer)
                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                                           C2Color::TRANSFER_OTHER),
                                          C2F(mCodedColorAspects, matrix)
                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
                                                           C2Color::MATRIX_OTHER)})
                             .withSetter(CodedColorAspectsSetter)
                             .build());

        addParameter(
                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
                        .withDefault(new C2StreamColorAspectsInfo::output(
                                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                        .withFields(
                                {C2F(mColorAspects, range)
                                         .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                                 C2F(mColorAspects, primaries)
                                         .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                                  C2Color::PRIMARIES_OTHER),
                                 C2F(mColorAspects, transfer)
                                         .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                                  C2Color::TRANSFER_OTHER),
                                 C2F(mColorAspects, matrix)
                                         .inRange(C2Color::MATRIX_UNSPECIFIED,
                                                  C2Color::MATRIX_OTHER)})
                        .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
                        .build());

        std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
        }
        // If the surface color format isn't added to the supported formats, there is no way to
        // know when the color format is configured for surface mode. This is necessary to be
        // able to choose a 10-bit format while decoding 10-bit clips in surface mode.
        pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);

        // TODO: support more formats?
        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
                             .withDefault(new C2StreamPixelFormatInfo::output(
                                     0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
                             .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
                             .build());
    }

    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
                          C2P<C2StreamPictureSizeInfo::output>& me) {
        (void)mayBlock;
        C2R res = C2R::Ok();
        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
            me.set().width = oldMe.v.width;
        }
        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
            me.set().height = oldMe.v.height;
        }
        return res;
    }

    static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
                                    const C2P<C2StreamPictureSizeInfo::output>& size) {
        (void)mayBlock;
        // TODO: get max width/height from the size's field helpers vs.
        // hardcoding
        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
        return C2R::Ok();
    }

    static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
                                  const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
        (void)mayBlock;
        // assume compression ratio of 2, but enforce a floor
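        // Each 64x64 superblock of 8-bit 4:2:0 video is 64 * 64 * 3 / 2 = 6144 bytes
        // uncompressed; at the assumed 2:1 compression ratio that is 3072 bytes per
        // superblock, which appears to be where the constant below comes from.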
        me.set().value =
                c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
                       kMinInputBufferSize);
        return C2R::Ok();
    }

    static C2R DefaultColorAspectsSetter(bool mayBlock,
                                         C2P<C2StreamColorAspectsTuning::output>& me) {
        (void)mayBlock;
        if (me.v.range > C2Color::RANGE_OTHER) {
            me.set().range = C2Color::RANGE_OTHER;
        }
        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
            me.set().primaries = C2Color::PRIMARIES_OTHER;
        }
        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
            me.set().transfer = C2Color::TRANSFER_OTHER;
        }
        if (me.v.matrix > C2Color::MATRIX_OTHER) {
            me.set().matrix = C2Color::MATRIX_OTHER;
        }
        return C2R::Ok();
    }

    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
        (void)mayBlock;
        if (me.v.range > C2Color::RANGE_OTHER) {
            me.set().range = C2Color::RANGE_OTHER;
        }
        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
            me.set().primaries = C2Color::PRIMARIES_OTHER;
        }
        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
            me.set().transfer = C2Color::TRANSFER_OTHER;
        }
        if (me.v.matrix > C2Color::MATRIX_OTHER) {
            me.set().matrix = C2Color::MATRIX_OTHER;
        }
        return C2R::Ok();
    }

    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
                                  const C2P<C2StreamColorAspectsTuning::output>& def,
                                  const C2P<C2StreamColorAspectsInfo::input>& coded) {
        (void)mayBlock;
        // take default values for all unspecified fields, and coded values for specified ones
        me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
        me.set().primaries =
                coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
        me.set().transfer =
                coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
        me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
        return C2R::Ok();
    }

    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
                                  const C2P<C2StreamPictureSizeInfo::output>& size) {
        (void)mayBlock;
        (void)size;
        (void)me;  // TODO: validate
        return C2R::Ok();
    }

    std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
        return mDefaultColorAspects;
    }

    std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() { return mColorAspects; }

    static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
        (void)mayBlock;
        (void)me;  // TODO: validate
        return C2R::Ok();
    }

    static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
        (void)mayBlock;
        (void)me;  // TODO: validate
        return C2R::Ok();
    }

    // unsafe getters
    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
        return mPixelFormat;
    }

    static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
        (void)mayBlock;
        if (me.v.mastering.red.x > 1) {
            me.set().mastering.red.x = 1;
        }
        if (me.v.mastering.red.y > 1) {
            me.set().mastering.red.y = 1;
        }
        if (me.v.mastering.green.x > 1) {
            me.set().mastering.green.x = 1;
        }
        if (me.v.mastering.green.y > 1) {
            me.set().mastering.green.y = 1;
        }
        if (me.v.mastering.blue.x > 1) {
            me.set().mastering.blue.x = 1;
        }
        if (me.v.mastering.blue.y > 1) {
            me.set().mastering.blue.y = 1;
        }
        if (me.v.mastering.white.x > 1) {
            me.set().mastering.white.x = 1;
        }
        if (me.v.mastering.white.y > 1) {
            me.set().mastering.white.y = 1;
        }
        if (me.v.mastering.maxLuminance > 65535.0) {
            me.set().mastering.maxLuminance = 65535.0;
        }
        if (me.v.mastering.minLuminance > 6.5535) {
            me.set().mastering.minLuminance = 6.5535;
        }
        if (me.v.maxCll > 65535.0) {
            me.set().maxCll = 65535.0;
        }
        if (me.v.maxFall > 65535.0) {
            me.set().maxFall = 65535.0;
        }
        return C2R::Ok();
    }

  private:
    std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
    std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
    std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
    std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
    std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
    std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
    std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
    std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
    std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
    std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
};

C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
                               const std::shared_ptr<IntfImpl>& intfImpl)
    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl) {
    mTimeStart = mTimeEnd = systemTime();
}

C2SoftDav1dDec::~C2SoftDav1dDec() {
    onRelease();
}

c2_status_t C2SoftDav1dDec::onInit() {
    return initDecoder() ? C2_OK : C2_CORRUPTED;
}

c2_status_t C2SoftDav1dDec::onStop() {
    // TODO: b/277797541 - investigate if the decoder needs to be flushed.
    mSignalledError = false;
    mSignalledOutputEos = false;
    return C2_OK;
}

void C2SoftDav1dDec::onReset() {
    (void)onStop();
    c2_status_t err = onFlush_sm();
    if (err != C2_OK) {
        ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
        destroyDecoder();
        if (!initDecoder()) {
            ALOGE("Hard reset failed.");
        }
    }
}

void C2SoftDav1dDec::flushDav1d() {
    if (mDav1dCtx) {
        Dav1dPicture p;

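        // Release every picture still queued for output and drain anything dav1d has
        // buffered internally before calling dav1d_flush(), so no picture references leak.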
        while (mDecodedPictures.size() > 0) {
            p = mDecodedPictures.front();
            mDecodedPictures.pop_front();

            dav1d_picture_unref(&p);
        }

        int res = 0;
        while (true) {
            memset(&p, 0, sizeof(p));

            if ((res = dav1d_get_picture(mDav1dCtx, &p)) < 0) {
                if (res != DAV1D_ERR(EAGAIN)) {
                    ALOGE("Error decoding frame: %s\n", strerror(DAV1D_ERR(res)));
                    break;
                } else {
                    res = 0;
                    break;
                }
            } else {
                dav1d_picture_unref(&p);
            }
        }

        dav1d_flush(mDav1dCtx);
    }
}

void C2SoftDav1dDec::onRelease() {
    destroyDecoder();
}

c2_status_t C2SoftDav1dDec::onFlush_sm() {
    flushDav1d();

    mSignalledError = false;
    mSignalledOutputEos = false;

    return C2_OK;
}

static int GetCPUCoreCount() {
    int cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
#else
    // _SC_NPROC_ONLN must be defined...
    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
    CHECK(cpuCoreCount >= 1);
    ALOGV("Number of CPU cores: %d", cpuCoreCount);
    return cpuCoreCount;
}

bool C2SoftDav1dDec::initDecoder() {
#ifdef FILE_DUMP_ENABLE
    mC2SoftDav1dDump.initDumping();
#endif
    mSignalledError = false;
    mSignalledOutputEos = false;
    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
    {
        IntfImpl::Lock lock = mIntf->lock();
        mPixelFormatInfo = mIntf->getPixelFormat_l();
    }

    const char* version = dav1d_version();

    Dav1dSettings lib_settings;
    dav1d_default_settings(&lib_settings);
    int cpu_count = GetCPUCoreCount();
    lib_settings.n_threads = std::max(cpu_count / 2, 1);  // use up to half the cores by default.

    int32_t numThreads =
            android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
    if (numThreads > 0) lib_settings.n_threads = numThreads;

    int res = 0;
    if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
        ALOGE("dav1d_open failed. status: %d.", res);
        return false;
    } else {
        ALOGD("dav1d_open succeeded(n_threads=%d,version=%s).", lib_settings.n_threads, version);
    }

    return true;
}

void C2SoftDav1dDec::destroyDecoder() {
    if (mDav1dCtx) {
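        // Release any pictures still queued for output before closing the decoder so their
        // buffers are returned to dav1d.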
        Dav1dPicture p;
        while (mDecodedPictures.size() > 0) {
            memset(&p, 0, sizeof(p));
            p = mDecodedPictures.front();
            mDecodedPictures.pop_front();

            dav1d_picture_unref(&p);
        }

        dav1d_close(&mDav1dCtx);
        mDav1dCtx = nullptr;
        mOutputBufferIndex = 0;
        mInputBufferIndex = 0;
    }
#ifdef FILE_DUMP_ENABLE
    mC2SoftDav1dDump.destroyDumping();
#endif
}

void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
    uint32_t flags = 0;
    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
        flags |= C2FrameData::FLAG_END_OF_STREAM;
        ALOGV("signalling end_of_stream.");
    }
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
}

void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
                                const std::shared_ptr<C2GraphicBlock>& block) {
    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
    {
        IntfImpl::Lock lock = mIntf->lock();
        buffer->setInfo(mIntf->getColorAspects_l());
    }
    auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
        uint32_t flags = 0;
        if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
            (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
            flags |= C2FrameData::FLAG_END_OF_STREAM;
            ALOGV("signalling end_of_stream.");
        }
        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
        work->worklets.front()->output.buffers.clear();
        work->worklets.front()->output.buffers.push_back(buffer);
        work->worklets.front()->output.ordinal = work->input.ordinal;
        work->workletsProcessed = 1u;
    };
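    // If this picture belongs to the work item currently being processed, fill that work in
    // place; otherwise look up the pending work item by frame index and finish it.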
    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
        fillWork(work);
    } else {
        finish(index, fillWork);
    }
}

static void freeCallback(const uint8_t */*data*/, void */*cookie*/) {
    return;
}

void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
                             const std::shared_ptr<C2BlockPool>& pool) {
    work->result = C2_OK;
    work->workletsProcessed = 0u;
    work->worklets.front()->output.configUpdate.clear();
    work->worklets.front()->output.flags = work->input.flags;
    if (mSignalledError || mSignalledOutputEos) {
        work->result = C2_BAD_VALUE;
        return;
    }

    size_t inOffset = 0u;
    size_t inSize = 0u;
    C2ReadView rView = mDummyReadView;
    if (!work->input.buffers.empty()) {
        rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
        inSize = rView.capacity();
        if (inSize && rView.error()) {
            ALOGE("read view map failed %d", rView.error());
            work->result = C2_CORRUPTED;
            return;
        }
    }

    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
    bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);

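    // Codec-config buffers are not forwarded to dav1d; complete the work item right away.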
    if (codecConfig) {
        fillEmptyWork(work);
        return;
    }

    int64_t in_frameIndex = work->input.ordinal.frameIndex.peekll();
    if (inSize) {
        mInputBufferIndex = in_frameIndex;

        uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);

        mTimeStart = systemTime();
        nsecs_t delay = mTimeStart - mTimeEnd;

        // Send the bitstream data (inputBuffer) to dav1d.
        if (mDav1dCtx) {
            int i_ret = 0;

            Dav1dSequenceHeader seq;
            int res = dav1d_parse_sequence_header(&seq, bitstream, inSize);
            if (res == 0) {
                ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
                      seq.max_height, (long)in_frameIndex);
            }

            Dav1dData data;

            res = dav1d_data_wrap(&data, bitstream, inSize, freeCallback, nullptr);
            if (res != 0) {
                ALOGE("Decoder wrap error %s!", strerror(DAV1D_ERR(res)));
                i_ret = -1;
            } else {
                data.m.timestamp = in_frameIndex;
                // ALOGV("inSize=%ld, in_frameIndex=%ld, timestamp=%ld",
                //       inSize, in_frameIndex, data.m.timestamp);

                // Dump the bitstream data (inputBuffer) if dumping is enabled.
#ifdef FILE_DUMP_ENABLE
                mC2SoftDav1dDump.dumpInput(bitstream, inSize);
#endif

                bool b_draining = false;

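                // Feed the access unit to dav1d and opportunistically collect any pictures
                // that become ready. Decoded pictures are queued in mDecodedPictures and
                // consumed later by outputBuffer(); DAV1D_ERR(EAGAIN) simply means the decoder
                // needs more input or has nothing to output yet.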
                do {
                    res = dav1d_send_data(mDav1dCtx, &data);
                    if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
                        ALOGE("Decoder feed error %s!", strerror(DAV1D_ERR(res)));
                        /* bitstream decoding errors (typically DAV1D_ERR(EINVAL)) are assumed
                         * to be recoverable. Other errors returned from this function are
                         * either unexpected, or considered critical failures.
                         */
                        i_ret = res == DAV1D_ERR(EINVAL) ? 0 : -1;
                        break;
                    }

                    bool b_output_error = false;

                    do {
                        Dav1dPicture img;
                        memset(&img, 0, sizeof(img));

                        res = dav1d_get_picture(mDav1dCtx, &img);
                        if (res == 0) {
                            mDecodedPictures.push_back(img);

                            if (!end_of_stream) break;
                        } else if (res == DAV1D_ERR(EAGAIN)) {
                            /* the decoder needs more data to be able to output something.
                             * if there is more data pending, continue the loop below or
                             * otherwise break */
                            if (data.sz != 0) res = 0;
                            break;
                        } else {
                            ALOGE("warning! Decoder error %d!", res);
                            b_output_error = true;
                            break;
                        }
                    } while (res == 0);

                    if (b_output_error) break;

                    /* on drain, we must ignore the 1st EAGAIN */
                    if (!b_draining && (res == DAV1D_ERR(EAGAIN) || res == 0) &&
                        (end_of_stream)) {
                        b_draining = true;
                        res = 0;
                    }
                } while (res == 0 && ((data.sz != 0) || b_draining));

                if (data.sz > 0) {
                    ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
                    dav1d_data_unref(&data);
                }
            }

            mTimeEnd = systemTime();
            nsecs_t decodeTime = mTimeEnd - mTimeStart;
            // ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);

            if (i_ret != 0) {
                ALOGE("av1 decoder failed to decode frame. status: %d.", i_ret);
                work->result = C2_CORRUPTED;
                work->workletsProcessed = 1u;
                mSignalledError = true;
                return;
            }
        }
    }

    (void)outputBuffer(pool, work);

    if (end_of_stream) {
        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
        mSignalledOutputEos = true;
    } else if (!inSize) {
        fillEmptyWork(work);
    }
}

void C2SoftDav1dDec::getHDRStaticParams(Dav1dPicture* picture,
                                        const std::unique_ptr<C2Work>& work) {
    C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
    bool infoPresent = false;

    if (picture != nullptr) {
        if (picture->mastering_display != nullptr) {
            hdrStaticMetadataInfo.mastering.red.x =
                    picture->mastering_display->primaries[0][0] / 65536.0;
            hdrStaticMetadataInfo.mastering.red.y =
                    picture->mastering_display->primaries[0][1] / 65536.0;

            hdrStaticMetadataInfo.mastering.green.x =
                    picture->mastering_display->primaries[1][0] / 65536.0;
            hdrStaticMetadataInfo.mastering.green.y =
                    picture->mastering_display->primaries[1][1] / 65536.0;

            hdrStaticMetadataInfo.mastering.blue.x =
                    picture->mastering_display->primaries[2][0] / 65536.0;
            hdrStaticMetadataInfo.mastering.blue.y =
                    picture->mastering_display->primaries[2][1] / 65536.0;

            hdrStaticMetadataInfo.mastering.white.x =
                    picture->mastering_display->white_point[0] / 65536.0;
            hdrStaticMetadataInfo.mastering.white.y =
                    picture->mastering_display->white_point[1] / 65536.0;

            hdrStaticMetadataInfo.mastering.maxLuminance =
                    picture->mastering_display->max_luminance / 256.0;
            hdrStaticMetadataInfo.mastering.minLuminance =
                    picture->mastering_display->min_luminance / 16384.0;

            infoPresent = true;
        }

        if (picture->content_light != nullptr) {
            hdrStaticMetadataInfo.maxCll = picture->content_light->max_content_light_level;
            hdrStaticMetadataInfo.maxFall = picture->content_light->max_frame_average_light_level;
            infoPresent = true;
        }
    }

    // if (infoPresent) {
    //   ALOGD("received a hdrStaticMetadataInfo (mastering.red=%f,%f mastering.green=%f,%f
    //   mastering.blue=%f,%f mastering.white=%f,%f mastering.maxLuminance=%f
    //   mastering.minLuminance=%f maxCll=%f maxFall=%f) at mOutputBufferIndex=%d.",
    //   hdrStaticMetadataInfo.mastering.red.x,hdrStaticMetadataInfo.mastering.red.y,
    //   hdrStaticMetadataInfo.mastering.green.x,hdrStaticMetadataInfo.mastering.green.y,
    //   hdrStaticMetadataInfo.mastering.blue.x,hdrStaticMetadataInfo.mastering.blue.y,
    //   hdrStaticMetadataInfo.mastering.white.x,hdrStaticMetadataInfo.mastering.white.y,
    //   hdrStaticMetadataInfo.mastering.maxLuminance,hdrStaticMetadataInfo.mastering.minLuminance,
    //   hdrStaticMetadataInfo.maxCll,
    //   hdrStaticMetadataInfo.maxFall,
    //   mOutputBufferIndex);
    // }

    // config if static info has changed
    if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
        mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
        work->worklets.front()->output.configUpdate.push_back(
                C2Param::Copy(mHdrStaticMetadataInfo));
    }
}

void C2SoftDav1dDec::getHDR10PlusInfoData(Dav1dPicture* picture,
                                          const std::unique_ptr<C2Work>& work) {
    if (picture != nullptr) {
        if (picture->itut_t35 != nullptr) {
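            // Repackage the ITU-T T.35 metadata (country code, optional extension byte, then
            // the raw payload) so it can be passed to the client as HDR10+ info.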
            std::vector<uint8_t> payload;
            size_t payloadSize = picture->itut_t35->payload_size;
            if (payloadSize > 0) {
                payload.push_back(picture->itut_t35->country_code);
                if (picture->itut_t35->country_code == 0xFF) {
                    payload.push_back(picture->itut_t35->country_code_extension_byte);
                }
                payload.insert(payload.end(), picture->itut_t35->payload,
                               picture->itut_t35->payload + picture->itut_t35->payload_size);
            }

            std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
                    C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
            if (!hdr10PlusInfo) {
                ALOGE("Hdr10PlusInfo allocation failed");
                mSignalledError = true;
                work->result = C2_NO_MEMORY;
                return;
            }
            memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());

            // ALOGD("Received a hdr10PlusInfo from picture->itut_t35
            // (payload_size=%ld,country_code=%d) at mOutputBufferIndex=%d.",
            //       picture->itut_t35->payload_size,
            //       picture->itut_t35->country_code,
            //       mOutputBufferIndex);

            // config if hdr10Plus info has changed
            if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
                mHdr10PlusInfo = std::move(hdr10PlusInfo);
                work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
            }
        }
    }
}

void C2SoftDav1dDec::getVuiParams(Dav1dPicture* picture) {
    VuiColorAspects vuiColorAspects;

    if (picture) {
        vuiColorAspects.primaries = picture->seq_hdr->pri;
        vuiColorAspects.transfer = picture->seq_hdr->trc;
        vuiColorAspects.coeffs = picture->seq_hdr->mtrx;
        vuiColorAspects.fullRange = picture->seq_hdr->color_range;

        // ALOGD("Received a vuiColorAspects from dav1d
        //       (primaries = % d, transfer = % d, coeffs = % d, fullRange = % d)
        //       at mOutputBufferIndex = % d,
        //       out_frameIndex = % ld.",
        //       vuiColorAspects.primaries,
        //       vuiColorAspects.transfer, vuiColorAspects.coeffs, vuiColorAspects.fullRange,
        //       mOutputBufferIndex, picture->m.timestamp);
    }

    // convert vui aspects to C2 values if changed
    if (!(vuiColorAspects == mBitstreamColorAspects)) {
        mBitstreamColorAspects = vuiColorAspects;
        ColorAspects sfAspects;
        C2StreamColorAspectsInfo::input codedAspects = {0u};
        ColorUtils::convertIsoColorAspectsToCodecAspects(
                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
                vuiColorAspects.fullRange, sfAspects);
        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
        }
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
    }
}

void C2SoftDav1dDec::setError(const std::unique_ptr<C2Work>& work, c2_status_t error) {
    mSignalledError = true;
    work->result = error;
    work->workletsProcessed = 1u;
}

bool C2SoftDav1dDec::allocTmpFrameBuffer(size_t size) {
    if (size > mTmpFrameBufferSize) {
        mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
        if (mTmpFrameBuffer == nullptr) {
            mTmpFrameBufferSize = 0;
            return false;
        }
        mTmpFrameBufferSize = size;
    }
    return true;
}

bool C2SoftDav1dDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
                                  const std::unique_ptr<C2Work>& work) {
    if (!(work && pool)) return false;
    if (mDav1dCtx == nullptr) return false;

    // Get a decoded picture: reuse one already queued in mDecodedPictures, or pull one
    // directly from dav1d.
    Dav1dPicture img;
    memset(&img, 0, sizeof(img));

    int res = 0;
    if (mDecodedPictures.size() > 0) {
        img = mDecodedPictures.front();
        mDecodedPictures.pop_front();
        // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from the deque for
        // outputBuffer.",img.m.timestamp,img.m.timestamp);
    } else {
        res = dav1d_get_picture(mDav1dCtx, &img);
        if (res == 0) {
            // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from dav1d for
            // outputBuffer.",img.m.timestamp,img.m.timestamp);
        } else {
            ALOGE("failed to get a picture from dav1d for outputBuffer.");
        }
    }

    if (res == DAV1D_ERR(EAGAIN)) {
        ALOGD("Not enough data to output a picture.");
        return false;
    }
    if (res != 0) {
        ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
        return false;
    }

    const int width = img.p.w;
    const int height = img.p.h;
    if (width != mWidth || height != mHeight) {
        mWidth = width;
        mHeight = height;

        C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
        if (err == C2_OK) {
            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
        } else {
            ALOGE("Config update size failed");
            mSignalledError = true;
            work->result = C2_CORRUPTED;
            work->workletsProcessed = 1u;
            return false;
        }
    }

    getVuiParams(&img);
    getHDRStaticParams(&img, work);
    getHDR10PlusInfoData(&img, work);

    // out_frameIndex is the frame index that dav1d carried through in the picture's timestamp.
    int64_t out_frameIndex = img.m.timestamp;

    const bool isMonochrome = img.p.layout == DAV1D_PIXEL_LAYOUT_I400;

    int bitdepth = img.p.bpc;

    std::shared_ptr<C2GraphicBlock> block;
    uint32_t format = HAL_PIXEL_FORMAT_YV12;
    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
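    // For 10-bit content (when the client did not explicitly request the 8-bit flexible YUV
    // format), choose a 10-bit output format. RGBA1010102 is only allowed when the stream
    // signals BT.2020 primaries/matrix with the ST 2084 (PQ) transfer.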
    if (bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
        IntfImpl::Lock lock = mIntf->lock();
        codedColorAspects = mIntf->getColorAspects_l();
        bool allowRGBA1010102 = false;
        if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
            codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
            codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
            allowRGBA1010102 = true;
        }
        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
    }

    if (mHalPixelFormat != format) {
        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
        if (err == C2_OK) {
            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
        } else {
            ALOGE("Config update pixelFormat failed");
            mSignalledError = true;
            work->workletsProcessed = 1u;
            work->result = C2_CORRUPTED;
            return false;
        }
        mHalPixelFormat = format;
    }

    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};

    // We always create a graphic block that is width aligned to 16 and height
    // aligned to 2. We set the correct "crop" value of the image in the call to
    // createGraphicBuffer() by setting the correct image dimensions.
    c2_status_t err =
            pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);

    if (err != C2_OK) {
        ALOGE("fetchGraphicBlock for Output failed with status %d", err);
        work->result = err;
        return false;
    }

    C2GraphicView wView = block->map().get();

    if (wView.error()) {
        ALOGE("graphic view map failed %d", wView.error());
        work->result = C2_CORRUPTED;
        return false;
    }

    // ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
    //       block->height(), mWidth, mHeight, (int)out_frameIndex);

    mOutputBufferIndex = out_frameIndex;

    uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
    uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
    uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);

    C2PlanarLayout layout = wView.layout();
    size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;

    CONV_FORMAT_T convFormat;
    switch (img.p.layout) {
        case DAV1D_PIXEL_LAYOUT_I444:
            convFormat = CONV_FORMAT_I444;
            break;
        case DAV1D_PIXEL_LAYOUT_I422:
            convFormat = CONV_FORMAT_I422;
            break;
        default:
            convFormat = CONV_FORMAT_I420;
            break;
    }

    if (bitdepth == 10) {
        // TODO: b/277797541 - Investigate if we can ask DAV1D to output the required format during
        // decompression to avoid color conversion.
        const uint16_t* srcY = (const uint16_t*)img.data[0];
        const uint16_t* srcU = (const uint16_t*)img.data[1];
        const uint16_t* srcV = (const uint16_t*)img.data[2];
        size_t srcYStride = img.stride[0] / 2;
        size_t srcUStride = img.stride[1] / 2;
        size_t srcVStride = img.stride[1] / 2;

        if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
            if (isMonochrome) {
                const size_t tmpSize = mWidth;
                const bool needFill = tmpSize > mTmpFrameBufferSize;
                if (!allocTmpFrameBuffer(tmpSize)) {
                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
                    setError(work, C2_NO_MEMORY);
                    return false;
                }
                srcU = srcV = mTmpFrameBuffer.get();
                srcUStride = srcVStride = 0;
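                // Fill the shared chroma row with 512, the 10-bit mid-point, so the
                // monochrome image converts with neutral (gray) chroma.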
                if (needFill) {
                    std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
                }
            }
            convertPlanar16ToY410OrRGBA1010102(
                    dstY, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
                    dstYStride, mWidth, mHeight,
                    std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects),
                    convFormat);
        } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
            dstYStride /= 2;
            dstUStride /= 2;
            dstVStride /= 2;
            size_t tmpSize = 0;
            if ((img.p.layout == DAV1D_PIXEL_LAYOUT_I444) ||
                (img.p.layout == DAV1D_PIXEL_LAYOUT_I422)) {
                tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
                if (!allocTmpFrameBuffer(tmpSize)) {
                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
                    setError(work, C2_NO_MEMORY);
                    return false;
                }
            }
            convertPlanar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV, srcYStride,
                                  srcUStride, srcVStride, dstYStride, dstUStride, dstVStride,
                                  mWidth, mHeight, isMonochrome, convFormat, mTmpFrameBuffer.get(),
                                  tmpSize);
        } else {
            size_t tmpSize = 0;
            if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
                tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
                if (!allocTmpFrameBuffer(tmpSize)) {
                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
                    setError(work, C2_NO_MEMORY);
                    return false;
                }
            }
            convertPlanar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
                                  srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
                                  isMonochrome, convFormat, mTmpFrameBuffer.get(), tmpSize);
        }

        // if(mOutputBufferIndex % 100 == 0)
        ALOGV("output a 10-bit picture %dx%d from dav1d "
              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);

        // Dump the output buffer if dumping is enabled (debug only).
#ifdef FILE_DUMP_ENABLE
        mC2SoftDav1dDump.dumpOutput<uint16_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
                                              mWidth, mHeight);
#endif
    } else {
        const uint8_t* srcY = (const uint8_t*)img.data[0];
        const uint8_t* srcU = (const uint8_t*)img.data[1];
        const uint8_t* srcV = (const uint8_t*)img.data[2];

        size_t srcYStride = img.stride[0];
        size_t srcUStride = img.stride[1];
        size_t srcVStride = img.stride[1];

        // if(mOutputBufferIndex % 100 == 0)
        ALOGV("output an 8-bit picture %dx%d from dav1d "
              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);

        // Dump the output buffer if dumping is enabled (debug only).
#ifdef FILE_DUMP_ENABLE
        mC2SoftDav1dDump.dumpOutput<uint8_t>(srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
                                             mWidth, mHeight);
#endif
        convertPlanar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
                             dstYStride, dstUStride, dstVStride, mWidth, mHeight, isMonochrome,
                             convFormat);
    }

    dav1d_picture_unref(&img);

    finishWork(out_frameIndex, work, std::move(block));
    block = nullptr;
    return true;
}

c2_status_t C2SoftDav1dDec::drainInternal(uint32_t drainMode,
                                          const std::shared_ptr<C2BlockPool>& pool,
                                          const std::unique_ptr<C2Work>& work) {
    if (drainMode == NO_DRAIN) {
        ALOGW("drain with NO_DRAIN: no-op");
        return C2_OK;
    }
    if (drainMode == DRAIN_CHAIN) {
        ALOGW("DRAIN_CHAIN not supported");
        return C2_OMITTED;
    }

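    // Keep emitting pictures until dav1d reports that it has nothing left to output.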
    while (outputBuffer(pool, work)) {
    }

    if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
        fillEmptyWork(work);
    }

    return C2_OK;
}

c2_status_t C2SoftDav1dDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
    return drainInternal(drainMode, pool, nullptr);
}

class C2SoftDav1dFactory : public C2ComponentFactory {
  public:
    C2SoftDav1dFactory()
        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
                  GetCodec2PlatformComponentStore()->getParamReflector())) {}

    virtual c2_status_t createComponent(c2_node_id_t id,
                                        std::shared_ptr<C2Component>* const component,
                                        std::function<void(C2Component*)> deleter) override {
        *component = std::shared_ptr<C2Component>(
                new C2SoftDav1dDec(COMPONENT_NAME, id,
                                   std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
                deleter);
        return C2_OK;
    }

    virtual c2_status_t createInterface(
            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
            std::function<void(C2ComponentInterface*)> deleter) override {
        *interface = std::shared_ptr<C2ComponentInterface>(
                new SimpleInterface<C2SoftDav1dDec::IntfImpl>(
                        COMPONENT_NAME, id, std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
                deleter);
        return C2_OK;
    }

    virtual ~C2SoftDav1dFactory() override = default;

  private:
    std::shared_ptr<C2ReflectorHelper> mHelper;
};

}  // namespace android

__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
    ALOGV("in %s", __func__);
    return new ::android::C2SoftDav1dFactory();
}

__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
        ::C2ComponentFactory* factory) {
    ALOGV("in %s", __func__);
    delete factory;
}