/*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17// #define LOG_NDEBUG 0
18#define LOG_TAG "C2SoftDav1dDec"
19#include <android-base/properties.h>
20#include <cutils/properties.h>
21#include <thread>
22
23#include <C2Debug.h>
24#include <C2PlatformSupport.h>
25#include <Codec2BufferUtils.h>
26#include <Codec2CommonUtils.h>
27#include <Codec2Mapper.h>
28#include <SimpleC2Interface.h>
29#include <libyuv.h>
30#include <log/log.h>
31#include <media/stagefright/foundation/AUtils.h>
32#include <media/stagefright/foundation/MediaDefs.h>
33#include "C2SoftDav1dDec.h"
34
#include <algorithm>

// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
#if LIBYUV_VERSION >= 1780
#define HAVE_LIBYUV_I410_I210_TO_AB30 1
#else
#define HAVE_LIBYUV_I410_I210_TO_AB30 0
#endif
42
43namespace android {
44
// Flag to enable dumping the bitstream and the decoded pictures to files.
46static const bool ENABLE_DUMPING_FILES_DEFAULT = false;
47static const char ENABLE_DUMPING_FILES_PROPERTY[] = "debug.dav1d.enabledumping";
48
49// The number of frames to dump to a file
50static const int NUM_FRAMES_TO_DUMP_DEFAULT = INT_MAX;
51static const char NUM_FRAMES_TO_DUMP_PROPERTY[] = "debug.dav1d.numframestodump";
52
53// The number of threads used for the dav1d decoder.
54static const int NUM_THREADS_DAV1D_DEFAULT = 0;
55static const char NUM_THREADS_DAV1D_PROPERTY[] = "debug.dav1d.numthreads";
56
// The codec name is set and passed in as a compile flag (CODECNAME) from Android.bp.
58constexpr char COMPONENT_NAME[] = CODECNAME;
59
60constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
61
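// Codec 2.0 parameter interface for the software dav1d AV1 decoder: defines the supported
// picture sizes, profiles/levels, color aspects, HDR metadata and pixel formats, plus the
// setters that keep these parameters consistent with each other.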
62class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
63 public:
64 explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
65 : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
66 C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
67 noPrivateBuffers();
68 noInputReferences();
69 noOutputReferences();
70 noInputLatency();
71 noTimeStretch();
72
73 addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
74 .withConstValue(new C2ComponentAttributesSetting(
75 C2Component::ATTRIB_IS_TEMPORAL))
76 .build());
77
78 addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
79 .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
80 .withFields({
81 C2F(mSize, width).inRange(2, 4096),
82 C2F(mSize, height).inRange(2, 4096),
83 })
84 .withSetter(SizeSetter)
85 .build());
86
87 addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
88 .withDefault(new C2StreamProfileLevelInfo::input(
89 0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
90 .withFields({C2F(mProfileLevel, profile)
91 .oneOf({C2Config::PROFILE_AV1_0,
92 C2Config::PROFILE_AV1_1}),
93 C2F(mProfileLevel, level)
94 .oneOf({
95 C2Config::LEVEL_AV1_2,
96 C2Config::LEVEL_AV1_2_1,
97 C2Config::LEVEL_AV1_2_2,
98 C2Config::LEVEL_AV1_2_3,
99 C2Config::LEVEL_AV1_3,
100 C2Config::LEVEL_AV1_3_1,
101 C2Config::LEVEL_AV1_3_2,
102 C2Config::LEVEL_AV1_3_3,
103 C2Config::LEVEL_AV1_4,
104 C2Config::LEVEL_AV1_4_1,
105 C2Config::LEVEL_AV1_4_2,
106 C2Config::LEVEL_AV1_4_3,
107 C2Config::LEVEL_AV1_5,
108 C2Config::LEVEL_AV1_5_1,
109 C2Config::LEVEL_AV1_5_2,
110 C2Config::LEVEL_AV1_5_3,
111 })})
112 .withSetter(ProfileLevelSetter, mSize)
113 .build());
114
115 mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
116 addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
117 .withDefault(mHdr10PlusInfoInput)
118 .withFields({
119 C2F(mHdr10PlusInfoInput, m.value).any(),
120 })
121 .withSetter(Hdr10PlusInfoInputSetter)
122 .build());
123
124 mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
125 addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
126 .withDefault(mHdr10PlusInfoOutput)
127 .withFields({
128 C2F(mHdr10PlusInfoOutput, m.value).any(),
129 })
130 .withSetter(Hdr10PlusInfoOutputSetter)
131 .build());
132
133 // default static info
134 C2HdrStaticMetadataStruct defaultStaticInfo{};
135 helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
136 addParameter(
137 DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
138 .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
139 .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
140 C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
141 C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
142 C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
143 C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
144 C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
145 C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
                             C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
147 C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
148 C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
149 C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
150 C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
151 .withSetter(HdrStaticInfoSetter)
152 .build());
153
154 addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
155 .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
156 .withFields({
157 C2F(mSize, width).inRange(2, 2048, 2),
158 C2F(mSize, height).inRange(2, 2048, 2),
159 })
160 .withSetter(MaxPictureSizeSetter, mSize)
161 .build());
162
163 addParameter(
164 DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
165 .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
166 .withFields({
167 C2F(mMaxInputSize, value).any(),
168 })
169 .calculatedAs(MaxInputSizeSetter, mMaxSize)
170 .build());
171
172 C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
173 std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
174 C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
175 memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
176
177 defaultColorInfo = C2StreamColorInfo::output::AllocShared(
178 {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
179 helper->addStructDescriptors<C2ChromaOffsetStruct>();
180
181 addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
182 .withConstValue(defaultColorInfo)
183 .build());
184
185 addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
186 .withDefault(new C2StreamColorAspectsTuning::output(
187 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
188 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
189 .withFields({C2F(mDefaultColorAspects, range)
190 .inRange(C2Color::RANGE_UNSPECIFIED,
191 C2Color::RANGE_OTHER),
192 C2F(mDefaultColorAspects, primaries)
193 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
194 C2Color::PRIMARIES_OTHER),
195 C2F(mDefaultColorAspects, transfer)
196 .inRange(C2Color::TRANSFER_UNSPECIFIED,
197 C2Color::TRANSFER_OTHER),
198 C2F(mDefaultColorAspects, matrix)
199 .inRange(C2Color::MATRIX_UNSPECIFIED,
200 C2Color::MATRIX_OTHER)})
201 .withSetter(DefaultColorAspectsSetter)
202 .build());
203
204 addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
205 .withDefault(new C2StreamColorAspectsInfo::input(
206 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
207 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
208 .withFields({C2F(mCodedColorAspects, range)
209 .inRange(C2Color::RANGE_UNSPECIFIED,
210 C2Color::RANGE_OTHER),
211 C2F(mCodedColorAspects, primaries)
212 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
213 C2Color::PRIMARIES_OTHER),
214 C2F(mCodedColorAspects, transfer)
215 .inRange(C2Color::TRANSFER_UNSPECIFIED,
216 C2Color::TRANSFER_OTHER),
217 C2F(mCodedColorAspects, matrix)
218 .inRange(C2Color::MATRIX_UNSPECIFIED,
219 C2Color::MATRIX_OTHER)})
220 .withSetter(CodedColorAspectsSetter)
221 .build());
222
223 addParameter(
224 DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
225 .withDefault(new C2StreamColorAspectsInfo::output(
226 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
227 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
228 .withFields(
229 {C2F(mColorAspects, range)
230 .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
231 C2F(mColorAspects, primaries)
232 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
233 C2Color::PRIMARIES_OTHER),
234 C2F(mColorAspects, transfer)
235 .inRange(C2Color::TRANSFER_UNSPECIFIED,
236 C2Color::TRANSFER_OTHER),
237 C2F(mColorAspects, matrix)
238 .inRange(C2Color::MATRIX_UNSPECIFIED,
239 C2Color::MATRIX_OTHER)})
240 .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
241 .build());
242
243 std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
244 if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
245 pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
246 }
        // If the surface color format (IMPLEMENTATION_DEFINED) isn't added to the supported
        // formats, there is no way to know when the color format is configured to surface.
        // This is necessary to be able to choose a 10-bit format while decoding 10-bit clips
        // in surface mode.
250 pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
251
252 // TODO: support more formats?
253 addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
254 .withDefault(new C2StreamPixelFormatInfo::output(
255 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
256 .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
257 .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
258 .build());
259 }
260
261 static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
262 C2P<C2StreamPictureSizeInfo::output>& me) {
263 (void)mayBlock;
264 C2R res = C2R::Ok();
265 if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
266 res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
267 me.set().width = oldMe.v.width;
268 }
269 if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
270 res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
271 me.set().height = oldMe.v.height;
272 }
273 return res;
274 }
275
276 static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
277 const C2P<C2StreamPictureSizeInfo::output>& size) {
278 (void)mayBlock;
279 // TODO: get max width/height from the size's field helpers vs.
280 // hardcoding
281 me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
282 me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
283 return C2R::Ok();
284 }
285
286 static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
287 const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
288 (void)mayBlock;
289 // assume compression ratio of 2, but enforce a floor
290 me.set().value =
291 c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
292 kMinInputBufferSize);
293 return C2R::Ok();
294 }
295
296 static C2R DefaultColorAspectsSetter(bool mayBlock,
297 C2P<C2StreamColorAspectsTuning::output>& me) {
298 (void)mayBlock;
299 if (me.v.range > C2Color::RANGE_OTHER) {
300 me.set().range = C2Color::RANGE_OTHER;
301 }
302 if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
303 me.set().primaries = C2Color::PRIMARIES_OTHER;
304 }
305 if (me.v.transfer > C2Color::TRANSFER_OTHER) {
306 me.set().transfer = C2Color::TRANSFER_OTHER;
307 }
308 if (me.v.matrix > C2Color::MATRIX_OTHER) {
309 me.set().matrix = C2Color::MATRIX_OTHER;
310 }
311 return C2R::Ok();
312 }
313
314 static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
315 (void)mayBlock;
316 if (me.v.range > C2Color::RANGE_OTHER) {
317 me.set().range = C2Color::RANGE_OTHER;
318 }
319 if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
320 me.set().primaries = C2Color::PRIMARIES_OTHER;
321 }
322 if (me.v.transfer > C2Color::TRANSFER_OTHER) {
323 me.set().transfer = C2Color::TRANSFER_OTHER;
324 }
325 if (me.v.matrix > C2Color::MATRIX_OTHER) {
326 me.set().matrix = C2Color::MATRIX_OTHER;
327 }
328 return C2R::Ok();
329 }
330
331 static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
332 const C2P<C2StreamColorAspectsTuning::output>& def,
333 const C2P<C2StreamColorAspectsInfo::input>& coded) {
334 (void)mayBlock;
335 // take default values for all unspecified fields, and coded values for specified ones
336 me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
337 me.set().primaries =
338 coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
339 me.set().transfer =
340 coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
341 me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
342 return C2R::Ok();
343 }
344
345 static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
346 const C2P<C2StreamPictureSizeInfo::output>& size) {
347 (void)mayBlock;
348 (void)size;
349 (void)me; // TODO: validate
350 return C2R::Ok();
351 }
352
353 std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
354 return mDefaultColorAspects;
355 }
356
357 std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() { return mColorAspects; }
358
359 static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
360 (void)mayBlock;
361 (void)me; // TODO: validate
362 return C2R::Ok();
363 }
364
365 static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
366 (void)mayBlock;
367 (void)me; // TODO: validate
368 return C2R::Ok();
369 }
370
371 // unsafe getters
372 std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
373 return mPixelFormat;
374 }
375
376 static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
377 (void)mayBlock;
378 if (me.v.mastering.red.x > 1) {
379 me.set().mastering.red.x = 1;
380 }
381 if (me.v.mastering.red.y > 1) {
382 me.set().mastering.red.y = 1;
383 }
384 if (me.v.mastering.green.x > 1) {
385 me.set().mastering.green.x = 1;
386 }
387 if (me.v.mastering.green.y > 1) {
388 me.set().mastering.green.y = 1;
389 }
390 if (me.v.mastering.blue.x > 1) {
391 me.set().mastering.blue.x = 1;
392 }
393 if (me.v.mastering.blue.y > 1) {
394 me.set().mastering.blue.y = 1;
395 }
396 if (me.v.mastering.white.x > 1) {
397 me.set().mastering.white.x = 1;
398 }
399 if (me.v.mastering.white.y > 1) {
400 me.set().mastering.white.y = 1;
401 }
402 if (me.v.mastering.maxLuminance > 65535.0) {
403 me.set().mastering.maxLuminance = 65535.0;
404 }
405 if (me.v.mastering.minLuminance > 6.5535) {
406 me.set().mastering.minLuminance = 6.5535;
407 }
408 if (me.v.maxCll > 65535.0) {
409 me.set().maxCll = 65535.0;
410 }
411 if (me.v.maxFall > 65535.0) {
412 me.set().maxFall = 65535.0;
413 }
414 return C2R::Ok();
415 }
416
417 private:
418 std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
419 std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
420 std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
421 std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
422 std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
423 std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
424 std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
425 std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
426 std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
427 std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
428 std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
429 std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
430};
431
432C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
433 const std::shared_ptr<IntfImpl>& intfImpl)
434 : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
435 mIntf(intfImpl) {
436 mTimeStart = mTimeEnd = systemTime();
437}
438
439C2SoftDav1dDec::~C2SoftDav1dDec() {
440 onRelease();
441}
442
443c2_status_t C2SoftDav1dDec::onInit() {
444 return initDecoder() ? C2_OK : C2_CORRUPTED;
445}
446
447c2_status_t C2SoftDav1dDec::onStop() {
448 // TODO: b/277797541 - investigate if the decoder needs to be flushed.
449 mSignalledError = false;
450 mSignalledOutputEos = false;
451 return C2_OK;
452}
453
454void C2SoftDav1dDec::onReset() {
455 (void)onStop();
456 c2_status_t err = onFlush_sm();
457 if (err != C2_OK) {
458 ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
459 destroyDecoder();
460 if (!initDecoder()) {
461 ALOGE("Hard reset failed.");
462 }
463 }
464}
465
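// Drop any pictures still queued for output, pull out whatever dav1d has already decoded,
// and then flush the decoder state itself.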
466void C2SoftDav1dDec::flushDav1d() {
467 if (mDav1dCtx) {
468 Dav1dPicture p;
469
470 while (mDecodedPictures.size() > 0) {
471 p = mDecodedPictures.front();
472 mDecodedPictures.pop_front();
473
474 dav1d_picture_unref(&p);
475 }
476
477 int res = 0;
478 while (true) {
479 memset(&p, 0, sizeof(p));
480
481 if ((res = dav1d_get_picture(mDav1dCtx, &p)) < 0) {
482 if (res != DAV1D_ERR(EAGAIN)) {
483 ALOGE("Error decoding frame: %s\n", strerror(DAV1D_ERR(res)));
484 break;
485 } else {
486 res = 0;
487 break;
488 }
489 } else {
490 dav1d_picture_unref(&p);
491 }
492 }
493
494 dav1d_flush(mDav1dCtx);
495 }
496}
497
498void C2SoftDav1dDec::onRelease() {
499 destroyDecoder();
500}
501
502c2_status_t C2SoftDav1dDec::onFlush_sm() {
503 flushDav1d();
504
505 mSignalledError = false;
506 mSignalledOutputEos = false;
507
508 return C2_OK;
509}
510
511static int GetCPUCoreCount() {
512 int cpuCoreCount = 1;
513#if defined(_SC_NPROCESSORS_ONLN)
514 cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
515#else
516 // _SC_NPROC_ONLN must be defined...
517 cpuCoreCount = sysconf(_SC_NPROC_ONLN);
518#endif
519 CHECK(cpuCoreCount >= 1);
520 ALOGV("Number of CPU cores: %d", cpuCoreCount);
521 return cpuCoreCount;
522}
523
524bool C2SoftDav1dDec::initDecoder() {
525 nsecs_t now = systemTime();
526#ifdef FILE_DUMP_ENABLE
527 snprintf(mInDataFileName, 256, "%s_%" PRId64 "d.%s", DUMP_FILE_PATH, now, INPUT_DATA_DUMP_EXT);
528 snprintf(mInSizeFileName, 256, "%s_%" PRId64 "d.%s", DUMP_FILE_PATH, now, INPUT_SIZE_DUMP_EXT);
529 snprintf(mDav1dOutYuvFileName, 256, "%s_%" PRId64 "dx.%s", DUMP_FILE_PATH, now,
530 OUTPUT_YUV_DUMP_EXT);
531
532 bool enableDumping = android::base::GetBoolProperty(ENABLE_DUMPING_FILES_PROPERTY,
533 ENABLE_DUMPING_FILES_DEFAULT);
534
535 num_frames_to_dump =
536 android::base::GetIntProperty(NUM_FRAMES_TO_DUMP_PROPERTY, NUM_FRAMES_TO_DUMP_DEFAULT);
537
538 if (enableDumping) {
539 ALOGD("enableDumping = %d, num_frames_to_dump = %d", enableDumping, num_frames_to_dump);
540
541 mInDataFile = fopen(mInDataFileName, "wb");
542 if (mInDataFile == nullptr) {
543 ALOGD("Could not open file %s", mInDataFileName);
544 }
545
546 mInSizeFile = fopen(mInSizeFileName, "wb");
547 if (mInSizeFile == nullptr) {
548 ALOGD("Could not open file %s", mInSizeFileName);
549 }
550
551 mDav1dOutYuvFile = fopen(mDav1dOutYuvFileName, "wb");
552 if (mDav1dOutYuvFile == nullptr) {
553 ALOGD("Could not open file %s", mDav1dOutYuvFileName);
554 }
555 }
556#endif
557 mSignalledError = false;
558 mSignalledOutputEos = false;
559 mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
560 {
561 IntfImpl::Lock lock = mIntf->lock();
562 mPixelFormatInfo = mIntf->getPixelFormat_l();
563 }
564
565 const char* version = dav1d_version();
566
567 Dav1dSettings lib_settings;
568 dav1d_default_settings(&lib_settings);
569 int cpu_count = GetCPUCoreCount();
570 lib_settings.n_threads = std::max(cpu_count / 2, 1); // use up to half the cores by default.
571
572 int32_t numThreads =
573 android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
574 if (numThreads > 0) lib_settings.n_threads = numThreads;
575
576 int res = 0;
577 if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
578 ALOGE("dav1d_open failed. status: %d.", res);
579 return false;
580 } else {
581 ALOGD("dav1d_open succeeded(n_threads=%d,version=%s).", lib_settings.n_threads, version);
582 }
583
584 return true;
585}
586
587void C2SoftDav1dDec::destroyDecoder() {
588 if (mDav1dCtx) {
589 Dav1dPicture p;
590 while (mDecodedPictures.size() > 0) {
591 memset(&p, 0, sizeof(p));
592 p = mDecodedPictures.front();
593 mDecodedPictures.pop_front();
594
595 dav1d_picture_unref(&p);
596 }
597
598 dav1d_close(&mDav1dCtx);
599 mDav1dCtx = nullptr;
600 mOutputBufferIndex = 0;
601 mInputBufferIndex = 0;
602 }
603#ifdef FILE_DUMP_ENABLE
604 if (mInDataFile != nullptr) {
605 fclose(mInDataFile);
606 mInDataFile = nullptr;
607 }
608
609 if (mInSizeFile != nullptr) {
610 fclose(mInSizeFile);
611 mInSizeFile = nullptr;
612 }
613
614 if (mDav1dOutYuvFile != nullptr) {
615 fclose(mDav1dOutYuvFile);
616 mDav1dOutYuvFile = nullptr;
617 }
618#endif
619}
620
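// Complete a work item that produces no output buffer, propagating only the ordinal info
// and (if present) the end-of-stream flag.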
621void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
622 uint32_t flags = 0;
623 if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
624 flags |= C2FrameData::FLAG_END_OF_STREAM;
625 ALOGV("signalling end_of_stream.");
626 }
627 work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
628 work->worklets.front()->output.buffers.clear();
629 work->worklets.front()->output.ordinal = work->input.ordinal;
630 work->workletsProcessed = 1u;
631}
632
633void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
634 const std::shared_ptr<C2GraphicBlock>& block) {
635 std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
636 {
637 IntfImpl::Lock lock = mIntf->lock();
638 buffer->setInfo(mIntf->getColorAspects_l());
639 }
640 auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
641 uint32_t flags = 0;
642 if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
643 (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
644 flags |= C2FrameData::FLAG_END_OF_STREAM;
645 ALOGV("signalling end_of_stream.");
646 }
647 work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
648 work->worklets.front()->output.buffers.clear();
649 work->worklets.front()->output.buffers.push_back(buffer);
650 work->worklets.front()->output.ordinal = work->input.ordinal;
651 work->workletsProcessed = 1u;
652 };
653 if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
654 fillWork(work);
655 } else {
656 finish(index, fillWork);
657 }
658}
659
660void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
661 const std::shared_ptr<C2BlockPool>& pool) {
662 work->result = C2_OK;
663 work->workletsProcessed = 0u;
664 work->worklets.front()->output.configUpdate.clear();
665 work->worklets.front()->output.flags = work->input.flags;
666 if (mSignalledError || mSignalledOutputEos) {
667 work->result = C2_BAD_VALUE;
668 return;
669 }
670
671 size_t inOffset = 0u;
672 size_t inSize = 0u;
673 C2ReadView rView = mDummyReadView;
674 if (!work->input.buffers.empty()) {
675 rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
676 inSize = rView.capacity();
677 if (inSize && rView.error()) {
678 ALOGE("read view map failed %d", rView.error());
679 work->result = C2_CORRUPTED;
680 return;
681 }
682 }
683
684 bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
685 bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
686
687 if (codecConfig) {
688 fillEmptyWork(work);
689 return;
690 }
691
692 int64_t in_frameIndex = work->input.ordinal.frameIndex.peekll();
693 if (inSize) {
694 mInputBufferIndex = in_frameIndex;
695
696 uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
697
698 mTimeStart = systemTime();
699 nsecs_t delay = mTimeStart - mTimeEnd;
700
701 // Send the bitstream data (inputBuffer) to dav1d.
702 if (mDav1dCtx) {
703 int i_ret = 0;
704
705 Dav1dSequenceHeader seq;
706 int res = dav1d_parse_sequence_header(&seq, bitstream, inSize);
707 if (res == 0) {
708 ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
709 seq.max_height, (long)in_frameIndex);
710 }
711
712 // insert OBU TD if it is not present.
713 // TODO: b/286852962
714 uint8_t obu_type = (bitstream[0] >> 3) & 0xf;
715 Dav1dData data;
716
717 uint8_t* ptr = (obu_type == DAV1D_OBU_TD) ? dav1d_data_create(&data, inSize)
718 : dav1d_data_create(&data, inSize + 2);
719 if (ptr == nullptr) {
720 ALOGE("dav1d_data_create failed!");
721 i_ret = -1;
722
723 } else {
724 data.m.timestamp = in_frameIndex;
725
726 int new_Size;
727 if (obu_type != DAV1D_OBU_TD) {
728 new_Size = (int)(inSize + 2);
729
730 // OBU TD
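                    // 0x12 = OBU header byte with obu_type = OBU_TEMPORAL_DELIMITER (2) and
                    // obu_has_size_field set; the following 0x00 is its (empty) obu_size.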
731 ptr[0] = 0x12;
732 ptr[1] = 0;
733
734 memcpy(ptr + 2, bitstream, inSize);
735 } else {
736 new_Size = (int)(inSize);
737 // TODO: b/277797541 - investigate how to wrap this pointer in Dav1dData to
738 // avoid memcopy operations.
739 memcpy(ptr, bitstream, new_Size);
740 }
741
742 // ALOGV("memcpy(ptr,bitstream,inSize=%ld,new_Size=%d,in_frameIndex=%ld,timestamp=%ld,"
743 // "ptr[0,1,2,3,4]=%x,%x,%x,%x,%x)",
744 // inSize, new_Size, frameIndex, data.m.timestamp, ptr[0], ptr[1], ptr[2],
745 // ptr[3], ptr[4]);
746
747 // Dump the bitstream data (inputBuffer) if dumping is enabled.
748#ifdef FILE_DUMP_ENABLE
749 if (mInDataFile) {
750 int ret = fwrite(ptr, 1, new_Size, mInDataFile);
751
752 if (ret != new_Size) {
753 ALOGE("Error in fwrite %s, requested %d, returned %d", mInDataFileName,
754 new_Size, ret);
755 }
756 }
757
758 // Dump the size per inputBuffer if dumping is enabled.
759 if (mInSizeFile) {
760 int ret = fwrite(&new_Size, 1, 4, mInSizeFile);
761
762 if (ret != 4) {
763 ALOGE("Error in fwrite %s, requested %d, returned %d", mInSizeFileName, 4,
764 ret);
765 }
766 }
767#endif
768
769 bool b_draining = false;
770 int res;
771
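                // Feed the access unit to dav1d, then immediately drain any pictures it can
                // already produce; when end_of_stream is set, keep draining until the decoder
                // has nothing left to output.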
772 do {
773 res = dav1d_send_data(mDav1dCtx, &data);
774 if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
775 ALOGE("Decoder feed error %s!", strerror(DAV1D_ERR(res)));
                        /* Bitstream decoding errors (typically DAV1D_ERR(EINVAL)) are assumed
                         * to be recoverable. Other errors returned from this function are
                         * either unexpected or considered critical failures.
                         */
780 i_ret = res == DAV1D_ERR(EINVAL) ? 0 : -1;
781 break;
782 }
783
784 bool b_output_error = false;
785
786 do {
787 Dav1dPicture img;
788 memset(&img, 0, sizeof(img));
789
790 res = dav1d_get_picture(mDav1dCtx, &img);
791 if (res == 0) {
792 mDecodedPictures.push_back(img);
793
794 if (!end_of_stream) break;
795 } else if (res == DAV1D_ERR(EAGAIN)) {
796 /* the decoder needs more data to be able to output something.
797 * if there is more data pending, continue the loop below or
798 * otherwise break */
799 if (data.sz != 0) res = 0;
800 break;
801 } else {
802 ALOGE("warning! Decoder error %d!", res);
803 b_output_error = true;
804 break;
805 }
806 } while (res == 0);
807
808 if (b_output_error) break;
809
810 /* on drain, we must ignore the 1st EAGAIN */
811 if (!b_draining && (res == DAV1D_ERR(EAGAIN) || res == 0) &&
812 (end_of_stream)) {
813 b_draining = true;
814 res = 0;
815 }
816 } while (res == 0 && ((data.sz != 0) || b_draining));
817
818 if (data.sz > 0) {
819 ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
820 dav1d_data_unref(&data);
821 }
822 }
823
824 mTimeEnd = systemTime();
825 nsecs_t decodeTime = mTimeEnd - mTimeStart;
826 // ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);
827
828 if (i_ret != 0) {
829 ALOGE("av1 decoder failed to decode frame. status: %d.", i_ret);
830 work->result = C2_CORRUPTED;
831 work->workletsProcessed = 1u;
832 mSignalledError = true;
833 return;
834 }
835 }
836 }
837
838 (void)outputBuffer(pool, work);
839
840 if (end_of_stream) {
841 drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
842 mSignalledOutputEos = true;
843 } else if (!inSize) {
844 fillEmptyWork(work);
845 }
846}
847
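// Convert dav1d's mastering-display and content-light metadata (fixed-point values) into
// C2 HDR static info and emit a config update whenever it changes.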
848void C2SoftDav1dDec::getHDRStaticParams(Dav1dPicture* picture,
849 const std::unique_ptr<C2Work>& work) {
850 C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
851 bool infoPresent = false;
852
853 if (picture != nullptr) {
854 if (picture->mastering_display != nullptr) {
855 hdrStaticMetadataInfo.mastering.red.x =
856 picture->mastering_display->primaries[0][0] / 65536.0;
857 hdrStaticMetadataInfo.mastering.red.y =
858 picture->mastering_display->primaries[0][1] / 65536.0;
859
860 hdrStaticMetadataInfo.mastering.green.x =
861 picture->mastering_display->primaries[1][0] / 65536.0;
862 hdrStaticMetadataInfo.mastering.green.y =
863 picture->mastering_display->primaries[1][1] / 65536.0;
864
865 hdrStaticMetadataInfo.mastering.blue.x =
866 picture->mastering_display->primaries[2][0] / 65536.0;
867 hdrStaticMetadataInfo.mastering.blue.y =
868 picture->mastering_display->primaries[2][1] / 65536.0;
869
870 hdrStaticMetadataInfo.mastering.white.x =
871 picture->mastering_display->white_point[0] / 65536.0;
872 hdrStaticMetadataInfo.mastering.white.y =
873 picture->mastering_display->white_point[1] / 65536.0;
874
875 hdrStaticMetadataInfo.mastering.maxLuminance =
876 picture->mastering_display->max_luminance / 256.0;
877 hdrStaticMetadataInfo.mastering.minLuminance =
878 picture->mastering_display->min_luminance / 16384.0;
879
880 infoPresent = true;
881 }
882
883 if (picture->content_light != nullptr) {
884 hdrStaticMetadataInfo.maxCll = picture->content_light->max_content_light_level;
885 hdrStaticMetadataInfo.maxFall = picture->content_light->max_frame_average_light_level;
886 infoPresent = true;
887 }
888 }
889
890 // if (infoPresent) {
891 // ALOGD("received a hdrStaticMetadataInfo (mastering.red=%f,%f mastering.green=%f,%f
892 // mastering.blue=%f,%f mastering.white=%f,%f mastering.maxLuminance=%f
893 // mastering.minLuminance=%f maxCll=%f maxFall=%f) at mOutputBufferIndex=%d.",
894 // hdrStaticMetadataInfo.mastering.red.x,hdrStaticMetadataInfo.mastering.red.y,
895 // hdrStaticMetadataInfo.mastering.green.x,hdrStaticMetadataInfo.mastering.green.y,
896 // hdrStaticMetadataInfo.mastering.blue.x,hdrStaticMetadataInfo.mastering.blue.y,
897 // hdrStaticMetadataInfo.mastering.white.x,hdrStaticMetadataInfo.mastering.white.y,
898 // hdrStaticMetadataInfo.mastering.maxLuminance,hdrStaticMetadataInfo.mastering.minLuminance,
899 // hdrStaticMetadataInfo.maxCll,
900 // hdrStaticMetadataInfo.maxFall,
901 // mOutputBufferIndex);
902 // }
903
904 // config if static info has changed
905 if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
906 mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
907 work->worklets.front()->output.configUpdate.push_back(
908 C2Param::Copy(mHdrStaticMetadataInfo));
909 }
910}
911
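// Repackage the ITU-T T.35 payload attached to the picture as HDR10+ info and push it as a
// config update when it differs from the previously sent one.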
912void C2SoftDav1dDec::getHDR10PlusInfoData(Dav1dPicture* picture,
913 const std::unique_ptr<C2Work>& work) {
914 if (picture != nullptr) {
915 if (picture->itut_t35 != nullptr) {
916 std::vector<uint8_t> payload;
917 size_t payloadSize = picture->itut_t35->payload_size;
918 if (payloadSize > 0) {
919 payload.push_back(picture->itut_t35->country_code);
920 if (picture->itut_t35->country_code == 0xFF) {
921 payload.push_back(picture->itut_t35->country_code_extension_byte);
922 }
923 payload.insert(payload.end(), picture->itut_t35->payload,
924 picture->itut_t35->payload + picture->itut_t35->payload_size);
925 }
926
927 std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
928 C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
929 if (!hdr10PlusInfo) {
930 ALOGE("Hdr10PlusInfo allocation failed");
931 mSignalledError = true;
932 work->result = C2_NO_MEMORY;
933 return;
934 }
935 memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());
936
            // ALOGD("Received a hdr10PlusInfo from picture->itut_t35
938 // (payload_size=%ld,country_code=%d) at mOutputBufferIndex=%d.",
939 // picture->itut_t35->payload_size,
940 // picture->itut_t35->country_code,
941 // mOutputBufferIndex);
942
943 // config if hdr10Plus info has changed
944 if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
945 mHdr10PlusInfo = std::move(hdr10PlusInfo);
946 work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
947 }
948 }
949 }
950}
951
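// Read the color aspects signalled in the AV1 sequence header and, when they change, map
// them to C2 color aspects and reconfigure the interface.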
952void C2SoftDav1dDec::getVuiParams(Dav1dPicture* picture) {
953 VuiColorAspects vuiColorAspects;
954
955 if (picture) {
956 vuiColorAspects.primaries = picture->seq_hdr->pri;
957 vuiColorAspects.transfer = picture->seq_hdr->trc;
958 vuiColorAspects.coeffs = picture->seq_hdr->mtrx;
959 vuiColorAspects.fullRange = picture->seq_hdr->color_range;
960
961 // ALOGD("Received a vuiColorAspects from dav1d
962 // (primaries = % d, transfer = % d, coeffs = % d, fullRange = % d)
963 // at mOutputBufferIndex = % d,
964 // out_frameIndex = % ld.",
965 // vuiColorAspects.primaries,
966 // vuiColorAspects.transfer, vuiColorAspects.coeffs, vuiColorAspects.fullRange,
967 // mOutputBufferIndex, picture->m.timestamp);
968 }
969
970 // convert vui aspects to C2 values if changed
971 if (!(vuiColorAspects == mBitstreamColorAspects)) {
972 mBitstreamColorAspects = vuiColorAspects;
973 ColorAspects sfAspects;
974 C2StreamColorAspectsInfo::input codedAspects = {0u};
975 ColorUtils::convertIsoColorAspectsToCodecAspects(
976 vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
977 vuiColorAspects.fullRange, sfAspects);
978 if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
979 codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
980 }
981 if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
982 codedAspects.range = C2Color::RANGE_UNSPECIFIED;
983 }
984 if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
985 codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
986 }
987 if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
988 codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
989 }
990 std::vector<std::unique_ptr<C2SettingResult>> failures;
991 mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
992 }
993}
994
995void C2SoftDav1dDec::setError(const std::unique_ptr<C2Work>& work, c2_status_t error) {
996 mSignalledError = true;
997 work->result = error;
998 work->workletsProcessed = 1u;
999}
1000
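// Grow the scratch buffer used for pixel-format conversion; the existing buffer is reused
// if it is already large enough.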
1001bool C2SoftDav1dDec::allocTmpFrameBuffer(size_t size) {
1002 if (size > mTmpFrameBufferSize) {
1003 mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
1004 if (mTmpFrameBuffer == nullptr) {
1005 mTmpFrameBufferSize = 0;
1006 return false;
1007 }
1008 mTmpFrameBufferSize = size;
1009 }
1010 return true;
1011}
1012
1013#ifdef FILE_DUMP_ENABLE
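// Debug helper: append one decoded picture to the raw YUV dump file, plane by plane,
// honouring the chroma subsampling of the layout and the 8- vs 10-bit sample size.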
1014void C2SoftDav1dDec::writeDav1dOutYuvFile(const Dav1dPicture& p) {
1015 if (mDav1dOutYuvFile != NULL) {
1016 uint8_t* ptr;
1017 const int hbd = p.p.bpc > 8;
1018
1019 ptr = (uint8_t*)p.data[0];
1020 for (int y = 0; y < p.p.h; y++) {
1021 int iSize = p.p.w << hbd;
1022 int ret = fwrite(ptr, 1, iSize, mDav1dOutYuvFile);
1023 if (ret != iSize) {
1024 ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName, iSize,
1025 ret);
1026 break;
1027 }
1028
1029 ptr += p.stride[0];
1030 }
1031
1032 if (p.p.layout != DAV1D_PIXEL_LAYOUT_I400) {
1033 // u/v
1034 const int ss_ver = p.p.layout == DAV1D_PIXEL_LAYOUT_I420;
1035 const int ss_hor = p.p.layout != DAV1D_PIXEL_LAYOUT_I444;
1036 const int cw = (p.p.w + ss_hor) >> ss_hor;
1037 const int ch = (p.p.h + ss_ver) >> ss_ver;
1038 for (int pl = 1; pl <= 2; pl++) {
1039 ptr = (uint8_t*)p.data[pl];
1040 for (int y = 0; y < ch; y++) {
1041 int iSize = cw << hbd;
1042 int ret = fwrite(ptr, 1, cw << hbd, mDav1dOutYuvFile);
1043 if (ret != iSize) {
1044 ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName,
1045 iSize, ret);
1046 break;
1047 }
1048 ptr += p.stride[1];
1049 }
1050 }
1051 }
1052 }
1053}
1054#endif
1055
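// Take one decoded picture (from the local queue or straight from dav1d), publish any
// size/color/pixel-format config updates, convert the planes into a newly fetched graphic
// block, and finish the corresponding work item.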
1056bool C2SoftDav1dDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
1057 const std::unique_ptr<C2Work>& work) {
1058 if (!(work && pool)) return false;
1059 if (mDav1dCtx == nullptr) return false;
1060
    // Get a decoded picture, either one already queued in mDecodedPictures or directly
    // from dav1d.
1062 Dav1dPicture img;
1063 memset(&img, 0, sizeof(img));
1064
1065 int res = 0;
1066 if (mDecodedPictures.size() > 0) {
1067 img = mDecodedPictures.front();
1068 mDecodedPictures.pop_front();
1069 // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from the deque for
1070 // outputBuffer.",img.m.timestamp,img.m.timestamp);
1071 } else {
1072 res = dav1d_get_picture(mDav1dCtx, &img);
1073 if (res == 0) {
1074 // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from dav1d for
1075 // outputBuffer.",img.m.timestamp,img.m.timestamp);
1076 } else {
1077 ALOGE("failed to get a picture from dav1d for outputBuffer.");
1078 }
1079 }
1080
1081 if (res == DAV1D_ERR(EAGAIN)) {
1082 ALOGD("Not enough data to output a picture.");
1083 return false;
1084 }
1085 if (res != 0) {
1086 ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
1087 return false;
1088 }
1089
1090 const int width = img.p.w;
1091 const int height = img.p.h;
1092 if (width != mWidth || height != mHeight) {
1093 mWidth = width;
1094 mHeight = height;
1095
1096 C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
1097 std::vector<std::unique_ptr<C2SettingResult>> failures;
1098 c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
1099 if (err == C2_OK) {
1100 work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
1101 } else {
1102 ALOGE("Config update size failed");
1103 mSignalledError = true;
1104 work->result = C2_CORRUPTED;
1105 work->workletsProcessed = 1u;
1106 return false;
1107 }
1108 }
1109
1110 getVuiParams(&img);
1111 getHDRStaticParams(&img, work);
1112 getHDR10PlusInfoData(&img, work);
1113
    // out_frameIndex of the decoded picture, carried back from dav1d as the data timestamp.
1115 int64_t out_frameIndex = img.m.timestamp;
1116
1117#if LIBYUV_VERSION < 1779
    if (img.p.layout != DAV1D_PIXEL_LAYOUT_I400 && img.p.layout != DAV1D_PIXEL_LAYOUT_I420) {
1119 ALOGE("image_format %d not supported", img.p.layout);
1120 mSignalledError = true;
1121 work->workletsProcessed = 1u;
1122 work->result = C2_CORRUPTED;
1123 return false;
1124 }
1125#endif
1126
1127 const bool isMonochrome = img.p.layout == DAV1D_PIXEL_LAYOUT_I400;
1128
1129 int bitdepth = img.p.bpc;
1130
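    // Choose the output pixel format: YV12 by default; for 10-bit content, pick a 10-bit
    // capable format (P010, or RGBA_1010102 only for BT.2020/ST2084 streams) unless the
    // client explicitly configured YCBCR_420_888.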
1131 std::shared_ptr<C2GraphicBlock> block;
1132 uint32_t format = HAL_PIXEL_FORMAT_YV12;
1133 std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
1134 if (bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
1135 IntfImpl::Lock lock = mIntf->lock();
1136 codedColorAspects = mIntf->getColorAspects_l();
1137 bool allowRGBA1010102 = false;
1138 if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
1139 codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
1140 codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
1141 allowRGBA1010102 = true;
1142 }
1143 format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
1144#if !HAVE_LIBYUV_I410_I210_TO_AB30
        if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
            (img.p.layout != DAV1D_PIXEL_LAYOUT_I420)) {
1148 ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
1149 mSignalledError = true;
1150 work->result = C2_OMITTED;
1151 work->workletsProcessed = 1u;
1152 return false;
1153 }
1154#endif
1155 }
1156
1157 if (mHalPixelFormat != format) {
1158 C2StreamPixelFormatInfo::output pixelFormat(0u, format);
1159 std::vector<std::unique_ptr<C2SettingResult>> failures;
1160 c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
1161 if (err == C2_OK) {
1162 work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
1163 } else {
1164 ALOGE("Config update pixelFormat failed");
1165 mSignalledError = true;
1166 work->workletsProcessed = 1u;
1167 work->result = C2_CORRUPTED;
            return false;
1169 }
1170 mHalPixelFormat = format;
1171 }
1172
1173 C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
1174
1175 // We always create a graphic block that is width aligned to 16 and height
1176 // aligned to 2. We set the correct "crop" value of the image in the call to
1177 // createGraphicBuffer() by setting the correct image dimensions.
1178 c2_status_t err =
1179 pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);
1180
1181 if (err != C2_OK) {
1182 ALOGE("fetchGraphicBlock for Output failed with status %d", err);
1183 work->result = err;
1184 return false;
1185 }
1186
1187 C2GraphicView wView = block->map().get();
1188
1189 if (wView.error()) {
1190 ALOGE("graphic view map failed %d", wView.error());
1191 work->result = C2_CORRUPTED;
1192 return false;
1193 }
1194
1195 // ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
1196 // block->height(), mWidth, mHeight, (int)out_frameIndex);
1197
1198 mOutputBufferIndex = out_frameIndex;
1199
1200 uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
1201 uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
1202 uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
1203
1204 C2PlanarLayout layout = wView.layout();
1205 size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
1206 size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
1207 size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
1208
1209 if (bitdepth == 10) {
1210 // TODO: b/277797541 - Investigate if we can ask DAV1D to output the required format during
1211 // decompression to avoid color conversion.
1212 const uint16_t* srcY = (const uint16_t*)img.data[0];
1213 const uint16_t* srcU = (const uint16_t*)img.data[1];
1214 const uint16_t* srcV = (const uint16_t*)img.data[2];
1215 size_t srcYStride = img.stride[0] / 2;
1216 size_t srcUStride = img.stride[1] / 2;
1217 size_t srcVStride = img.stride[1] / 2;
1218
1219 if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
1220 bool processed = false;
1221#if HAVE_LIBYUV_I410_I210_TO_AB30
1222 if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
1223 libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
1224 dstYStride, &libyuv::kYuvV2020Constants, mWidth, mHeight);
1225 processed = true;
1226 } else if (img.p.layout == DAV1D_PIXEL_LAYOUT_I422) {
1227 libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
1228 dstYStride, &libyuv::kYuvV2020Constants, mWidth, mHeight);
1229 processed = true;
1230 }
1231#endif // HAVE_LIBYUV_I410_I210_TO_AB30
1232 if (!processed) {
1233 if (isMonochrome) {
1234 const size_t tmpSize = mWidth;
1235 const bool needFill = tmpSize > mTmpFrameBufferSize;
1236 if (!allocTmpFrameBuffer(tmpSize)) {
1237 ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
1238 setError(work, C2_NO_MEMORY);
1239 return false;
1240 }
1241 srcU = srcV = mTmpFrameBuffer.get();
1242 srcUStride = srcVStride = 0;
1243 if (needFill) {
1244 std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
1245 }
1246 }
1247 convertYUV420Planar16ToY410OrRGBA1010102(
1248 (uint32_t*)dstY, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
1249 dstYStride / sizeof(uint32_t), mWidth, mHeight,
1250 std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
1251 }
1252 } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
1253 dstYStride /= 2;
1254 dstUStride /= 2;
1255 dstVStride /= 2;
1256#if LIBYUV_VERSION >= 1779
1257 if ((img.p.layout == DAV1D_PIXEL_LAYOUT_I444) ||
1258 (img.p.layout == DAV1D_PIXEL_LAYOUT_I422)) {
1259 // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010
1260 // and libyuv::I210ToP010 when they are available. Note it may be safe to alias dstY
1261 // in I010ToP010, but the libyuv API doesn't make any guarantees.
1262 const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
1263 if (!allocTmpFrameBuffer(tmpSize)) {
1264 ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
1265 setError(work, C2_NO_MEMORY);
1266 return false;
1267 }
1268 uint16_t* const tmpY = mTmpFrameBuffer.get();
1269 uint16_t* const tmpU = tmpY + dstYStride * mHeight;
1270 uint16_t* const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
1271 if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
1272 libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
1273 dstYStride, tmpU, dstUStride, tmpV, dstUStride, mWidth,
1274 mHeight);
1275 } else {
1276 libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
1277 dstYStride, tmpU, dstUStride, tmpV, dstUStride, mWidth,
1278 mHeight);
1279 }
1280 libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
1281 (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride, mWidth,
1282 mHeight);
1283 } else {
1284 convertYUV420Planar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
1285 srcYStride, srcUStride, srcVStride, dstYStride,
1286 dstUStride, mWidth, mHeight, isMonochrome);
1287 }
1288#else // LIBYUV_VERSION < 1779
1289 convertYUV420Planar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
1290 srcYStride, srcUStride, srcVStride, dstYStride, dstUStride,
1291 mWidth, mHeight, isMonochrome);
1292#endif // LIBYUV_VERSION >= 1779
1293 } else {
1294#if LIBYUV_VERSION >= 1779
1295 if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
1296 // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420
1297 // when it's available.
1298 const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
1299 if (!allocTmpFrameBuffer(tmpSize)) {
1300 ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
1301 setError(work, C2_NO_MEMORY);
1302 return false;
1303 }
1304 uint16_t* const tmpY = mTmpFrameBuffer.get();
1305 uint16_t* const tmpU = tmpY + dstYStride * mHeight;
1306 uint16_t* const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
1307 libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
1308 dstYStride, tmpU, dstUStride, tmpV, dstVStride, mWidth, mHeight);
1309 libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride, dstY,
1310 dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
1311 } else if (img.p.layout == DAV1D_PIXEL_LAYOUT_I422) {
1312 libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
1313 dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
1314 } else {
1315 convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
1316 srcUStride, srcVStride, dstYStride, dstUStride, mWidth,
1317 mHeight, isMonochrome);
1318 }
1319#else // LIBYUV_VERSION < 1779
1320 convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
1321 srcVStride, dstYStride, dstUStride, mWidth, mHeight,
1322 isMonochrome);
1323#endif // LIBYUV_VERSION >= 1779
1324 }
1325
1326 // Dump the output buffer if dumping is enabled (debug only).
1327#ifdef FILE_DUMP_ENABLE
1328 FILE* fp_out = mDav1dOutYuvFile;
1329
1330 // if(mOutputBufferIndex % 100 == 0)
1331 ALOGV("output a 10bit picture %dx%d from dav1d "
1332 "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
1333 mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
1334
1335 if (fp_out && mOutputBufferIndex <= num_frames_to_dump) {
1336 for (int i = 0; i < mHeight; i++) {
1337 int ret = fwrite((uint8_t*)srcY + i * srcYStride * 2, 1, mWidth * 2, fp_out);
1338 if (ret != mWidth * 2) {
1339 ALOGE("Error in fwrite, requested %d, returned %d", mWidth * 2, ret);
1340 break;
1341 }
1342 }
1343
1344 for (int i = 0; i < mHeight / 2; i++) {
1345 int ret = fwrite((uint8_t*)srcU + i * srcUStride * 2, 1, mWidth, fp_out);
1346 if (ret != mWidth) {
1347 ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
1348 break;
1349 }
1350 }
1351
1352 for (int i = 0; i < mHeight / 2; i++) {
1353 int ret = fwrite((uint8_t*)srcV + i * srcVStride * 2, 1, mWidth, fp_out);
1354 if (ret != mWidth) {
1355 ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
1356 break;
1357 }
1358 }
1359 }
1360#endif
1361 } else {
1362 const uint8_t* srcY = (const uint8_t*)img.data[0];
1363 const uint8_t* srcU = (const uint8_t*)img.data[1];
1364 const uint8_t* srcV = (const uint8_t*)img.data[2];
1365
1366 size_t srcYStride = img.stride[0];
1367 size_t srcUStride = img.stride[1];
1368 size_t srcVStride = img.stride[1];
1369
        // Dump the output buffer if dumping is enabled (debug only).
1371#ifdef FILE_DUMP_ENABLE
1372 FILE* fp_out = mDav1dOutYuvFile;
1373 // if(mOutputBufferIndex % 100 == 0)
        ALOGV("output an 8bit picture %dx%d from dav1d "
1375 "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
1376 mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
1377
1378 if (fp_out && mOutputBufferIndex <= num_frames_to_dump) {
1379 for (int i = 0; i < mHeight; i++) {
1380 int ret = fwrite((uint8_t*)srcY + i * srcYStride, 1, mWidth, fp_out);
1381 if (ret != mWidth) {
1382 ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
1383 break;
1384 }
1385 }
1386
1387 for (int i = 0; i < mHeight / 2; i++) {
1388 int ret = fwrite((uint8_t*)srcU + i * srcUStride, 1, mWidth / 2, fp_out);
1389 if (ret != mWidth / 2) {
1390 ALOGE("Error in fwrite, requested %d, returned %d", mWidth / 2, ret);
1391 break;
1392 }
1393 }
1394
1395 for (int i = 0; i < mHeight / 2; i++) {
1396 int ret = fwrite((uint8_t*)srcV + i * srcVStride, 1, mWidth / 2, fp_out);
1397 if (ret != mWidth / 2) {
1398 ALOGE("Error in fwrite, requested %d, returned %d", mWidth / 2, ret);
1399 break;
1400 }
1401 }
1402 }
1403#endif
1404 if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
1405 libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
1406 dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
1407 } else if (img.p.layout == DAV1D_PIXEL_LAYOUT_I422) {
1408 libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
1409 dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
1410 } else {
1411 convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
1412 srcVStride, dstYStride, dstUStride, dstVStride, mWidth,
1413 mHeight, isMonochrome);
1414 }
1415 }
1416
1417 dav1d_picture_unref(&img);
1418
1419 finishWork(out_frameIndex, work, std::move(block));
1420 block = nullptr;
1421 return true;
1422}
1423
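// Output all pictures still pending in dav1d; for an EOS drain, finish the triggering work
// item with an empty buffer if it did not receive an output picture.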
1424c2_status_t C2SoftDav1dDec::drainInternal(uint32_t drainMode,
1425 const std::shared_ptr<C2BlockPool>& pool,
1426 const std::unique_ptr<C2Work>& work) {
1427 if (drainMode == NO_DRAIN) {
1428 ALOGW("drain with NO_DRAIN: no-op");
1429 return C2_OK;
1430 }
1431 if (drainMode == DRAIN_CHAIN) {
1432 ALOGW("DRAIN_CHAIN not supported");
1433 return C2_OMITTED;
1434 }
1435
1436 while (outputBuffer(pool, work)) {
1437 }
1438
1439 if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
1440 fillEmptyWork(work);
1441 }
1442
1443 return C2_OK;
1444}
1445
1446c2_status_t C2SoftDav1dDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
1447 return drainInternal(drainMode, pool, nullptr);
1448}
1449
1450class C2SoftDav1dFactory : public C2ComponentFactory {
1451 public:
1452 C2SoftDav1dFactory()
1453 : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
1454 GetCodec2PlatformComponentStore()->getParamReflector())) {}
1455
1456 virtual c2_status_t createComponent(c2_node_id_t id,
1457 std::shared_ptr<C2Component>* const component,
1458 std::function<void(C2Component*)> deleter) override {
1459 *component = std::shared_ptr<C2Component>(
1460 new C2SoftDav1dDec(COMPONENT_NAME, id,
1461 std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
1462 deleter);
1463 return C2_OK;
1464 }
1465
1466 virtual c2_status_t createInterface(
1467 c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
1468 std::function<void(C2ComponentInterface*)> deleter) override {
1469 *interface = std::shared_ptr<C2ComponentInterface>(
1470 new SimpleInterface<C2SoftDav1dDec::IntfImpl>(
1471 COMPONENT_NAME, id, std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
1472 deleter);
1473 return C2_OK;
1474 }
1475
1476 virtual ~C2SoftDav1dFactory() override = default;
1477
1478 private:
1479 std::shared_ptr<C2ReflectorHelper> mHelper;
1480};
1481
1482} // namespace android
1483
1484__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
1485 ALOGV("in %s", __func__);
1486 return new ::android::C2SoftDav1dFactory();
1487}
1488
1489__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
1490 ::C2ComponentFactory* factory) {
1491 ALOGV("in %s", __func__);
1492 delete factory;
1493}