/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftGav1Dec"
#include "C2SoftGav1Dec.h"

#include <android-base/properties.h>
#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <Codec2CommonUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <libyuv.h>
#include <log/log.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>

// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
#if LIBYUV_VERSION >= 1780
#include <algorithm>
#define HAVE_LIBYUV_I410_I210_TO_AB30 1
#else
#define HAVE_LIBYUV_I410_I210_TO_AB30 0
#endif

namespace android {

// Property used to control the number of threads used in the gav1 decoder.
constexpr char kNumThreadsProperty[] = "debug.c2.gav1.numthreads";

// codecname set and passed in as a compile flag from Android.bp
constexpr char COMPONENT_NAME[] = CODECNAME;

constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;

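// Parameter interface for the decoder: declares the stream parameters this
// component supports (picture size, profile/level, HDR metadata, color
// aspects, pixel formats) along with the setters that validate them.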
class C2SoftGav1Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
  explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
      : SimpleInterface<void>::BaseParams(
            helper, COMPONENT_NAME, C2Component::KIND_DECODER,
            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
    noPrivateBuffers();  // TODO: account for our buffers here.
    noInputReferences();
    noOutputReferences();
    noInputLatency();
    noTimeStretch();

    addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                     .withConstValue(new C2ComponentAttributesSetting(
                         C2Component::ATTRIB_IS_TEMPORAL))
                     .build());

    addParameter(
        DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
            .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 4096),
                C2F(mSize, height).inRange(2, 4096),
            })
            .withSetter(SizeSetter)
            .build());

    addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                     .withDefault(new C2StreamProfileLevelInfo::input(
                         0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
                     .withFields({C2F(mProfileLevel, profile)
                                      .oneOf({C2Config::PROFILE_AV1_0,
                                              C2Config::PROFILE_AV1_1}),
                                  C2F(mProfileLevel, level)
                                      .oneOf({
                                          C2Config::LEVEL_AV1_2, C2Config::LEVEL_AV1_2_1,
                                          C2Config::LEVEL_AV1_2_2, C2Config::LEVEL_AV1_2_3,
                                          C2Config::LEVEL_AV1_3, C2Config::LEVEL_AV1_3_1,
                                          C2Config::LEVEL_AV1_3_2, C2Config::LEVEL_AV1_3_3,
                                          C2Config::LEVEL_AV1_4, C2Config::LEVEL_AV1_4_1,
                                          C2Config::LEVEL_AV1_4_2, C2Config::LEVEL_AV1_4_3,
                                          C2Config::LEVEL_AV1_5, C2Config::LEVEL_AV1_5_1,
                                          C2Config::LEVEL_AV1_5_2, C2Config::LEVEL_AV1_5_3,
                                      })})
                     .withSetter(ProfileLevelSetter, mSize)
                     .build());

    mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
    addParameter(
        DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
            .withDefault(mHdr10PlusInfoInput)
            .withFields({
                C2F(mHdr10PlusInfoInput, m.value).any(),
            })
            .withSetter(Hdr10PlusInfoInputSetter)
            .build());

    mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
    addParameter(
        DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
            .withDefault(mHdr10PlusInfoOutput)
            .withFields({
                C2F(mHdr10PlusInfoOutput, m.value).any(),
            })
            .withSetter(Hdr10PlusInfoOutputSetter)
            .build());

    // default static info
    C2HdrStaticMetadataStruct defaultStaticInfo{};
    helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
    addParameter(
        DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
            .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
            .withFields({
                C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
                C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
                C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
                C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)
            })
            .withSetter(HdrStaticInfoSetter)
            .build());

    addParameter(
        DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
            .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 2048, 2),
                C2F(mSize, height).inRange(2, 2048, 2),
            })
            .withSetter(MaxPictureSizeSetter, mSize)
            .build());

    addParameter(DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
                     .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
                     .withFields({
                         C2F(mMaxInputSize, value).any(),
                     })
                     .calculatedAs(MaxInputSizeSetter, mMaxSize)
                     .build());

    C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
    std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
        C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */,
                                               C2Color::YUV_420);
    memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));

    defaultColorInfo = C2StreamColorInfo::output::AllocShared(
        {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */,
        C2Color::YUV_420);
    helper->addStructDescriptors<C2ChromaOffsetStruct>();

    addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
                     .withConstValue(defaultColorInfo)
                     .build());

    addParameter(
        DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsTuning::output(
                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields(
                {C2F(mDefaultColorAspects, range)
                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                 C2F(mDefaultColorAspects, primaries)
                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                              C2Color::PRIMARIES_OTHER),
                 C2F(mDefaultColorAspects, transfer)
                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
                              C2Color::TRANSFER_OTHER),
                 C2F(mDefaultColorAspects, matrix)
                     .inRange(C2Color::MATRIX_UNSPECIFIED,
                              C2Color::MATRIX_OTHER)})
            .withSetter(DefaultColorAspectsSetter)
            .build());

    addParameter(
        DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::input(
                0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mCodedColorAspects, range).inRange(
                    C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mCodedColorAspects, primaries).inRange(
                    C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mCodedColorAspects, transfer).inRange(
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mCodedColorAspects, matrix).inRange(
                    C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(CodedColorAspectsSetter)
            .build());

    addParameter(
        DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::output(
                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mColorAspects, range).inRange(
                    C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mColorAspects, primaries).inRange(
                    C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mColorAspects, transfer).inRange(
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mColorAspects, matrix).inRange(
                    C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
            .build());

    std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
    if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
      pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
    }
    // Unless the surface color format (IMPLEMENTATION_DEFINED) is listed among the
    // supported formats, there is no way to know when the color format is configured
    // for surface output. Listing it is necessary to be able to choose a 10-bit
    // format while decoding 10-bit clips in surface mode.
    pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);

    // TODO: support more formats?
    addParameter(
        DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
            .withDefault(new C2StreamPixelFormatInfo::output(
                0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
            .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
            .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
            .build());
  }

  static C2R SizeSetter(bool mayBlock,
                        const C2P<C2StreamPictureSizeInfo::output> &oldMe,
                        C2P<C2StreamPictureSizeInfo::output> &me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
      me.set().width = oldMe.v.width;
    }
    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
      me.set().height = oldMe.v.height;
    }
    return res;
  }

  static C2R MaxPictureSizeSetter(
      bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
      const C2P<C2StreamPictureSizeInfo::output> &size) {
    (void)mayBlock;
    // TODO: get max width/height from the size's field helpers vs.
    // hardcoding
    me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
    me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
    return C2R::Ok();
  }

  static C2R MaxInputSizeSetter(
      bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
      const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
    (void)mayBlock;
    // assume compression ratio of 2, but enforce a floor
    me.set().value = c2_max((((maxSize.v.width + 63) / 64)
                * ((maxSize.v.height + 63) / 64) * 3072), kMinInputBufferSize);
    return C2R::Ok();
  }

  static C2R DefaultColorAspectsSetter(
      bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
      me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
      me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
      me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
      me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
  }

  static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
      me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
      me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
      me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
      me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
  }

  static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
                                const C2P<C2StreamColorAspectsTuning::output> &def,
                                const C2P<C2StreamColorAspectsInfo::input> &coded) {
    (void)mayBlock;
    // take default values for all unspecified fields, and coded values for specified ones
    me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
    me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
        ? def.v.primaries : coded.v.primaries;
    me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
        ? def.v.transfer : coded.v.transfer;
    me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
    return C2R::Ok();
  }

  static C2R ProfileLevelSetter(
      bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
      const C2P<C2StreamPictureSizeInfo::output> &size) {
    (void)mayBlock;
    (void)size;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  std::shared_ptr<C2StreamColorAspectsTuning::output>
  getDefaultColorAspects_l() {
    return mDefaultColorAspects;
  }

  std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
    return mColorAspects;
  }

  static C2R Hdr10PlusInfoInputSetter(bool mayBlock,
                                      C2P<C2StreamHdr10PlusInfo::input> &me) {
    (void)mayBlock;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  static C2R Hdr10PlusInfoOutputSetter(bool mayBlock,
                                       C2P<C2StreamHdr10PlusInfo::output> &me) {
    (void)mayBlock;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  // unsafe getters
  std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const { return mPixelFormat; }

  static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output> &me) {
    (void)mayBlock;
    if (me.v.mastering.red.x > 1) {
      me.set().mastering.red.x = 1;
    }
    if (me.v.mastering.red.y > 1) {
      me.set().mastering.red.y = 1;
    }
    if (me.v.mastering.green.x > 1) {
      me.set().mastering.green.x = 1;
    }
    if (me.v.mastering.green.y > 1) {
      me.set().mastering.green.y = 1;
    }
    if (me.v.mastering.blue.x > 1) {
      me.set().mastering.blue.x = 1;
    }
    if (me.v.mastering.blue.y > 1) {
      me.set().mastering.blue.y = 1;
    }
    if (me.v.mastering.white.x > 1) {
      me.set().mastering.white.x = 1;
    }
    if (me.v.mastering.white.y > 1) {
      me.set().mastering.white.y = 1;
    }
    if (me.v.mastering.maxLuminance > 65535.0) {
      me.set().mastering.maxLuminance = 65535.0;
    }
    if (me.v.mastering.minLuminance > 6.5535) {
      me.set().mastering.minLuminance = 6.5535;
    }
    if (me.v.maxCll > 65535.0) {
      me.set().maxCll = 65535.0;
    }
    if (me.v.maxFall > 65535.0) {
      me.set().maxFall = 65535.0;
    }
    return C2R::Ok();
  }

 private:
  std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
  std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
  std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
  std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
  std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
  std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
  std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
  std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
  std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
  std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
  std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
  std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
};

C2SoftGav1Dec::C2SoftGav1Dec(const char *name, c2_node_id_t id,
                             const std::shared_ptr<IntfImpl> &intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mCodecCtx(nullptr) {
  mTimeStart = mTimeEnd = systemTime();
}

C2SoftGav1Dec::~C2SoftGav1Dec() { onRelease(); }

c2_status_t C2SoftGav1Dec::onInit() {
  return initDecoder() ? C2_OK : C2_CORRUPTED;
}

c2_status_t C2SoftGav1Dec::onStop() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  return C2_OK;
}

void C2SoftGav1Dec::onReset() {
  (void)onStop();
  c2_status_t err = onFlush_sm();
  if (err != C2_OK) {
    ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
    destroyDecoder();
    if (!initDecoder()) {
      ALOGE("Hard reset failed.");
    }
  }
}

void C2SoftGav1Dec::onRelease() { destroyDecoder(); }

c2_status_t C2SoftGav1Dec::onFlush_sm() {
  Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

  // Dequeue frame (if any) that was enqueued previously.
  const libgav1::DecoderBuffer *buffer;
  status = mCodecCtx->DequeueFrame(&buffer);
  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("Failed to dequeue frame after flushing the av1 decoder. status: %d",
          status);
    return C2_CORRUPTED;
  }

  mSignalledError = false;
  mSignalledOutputEos = false;

  return C2_OK;
}

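// Best-effort count of online CPU cores, used below to size the libgav1
// thread pool.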
static int GetCPUCoreCount() {
  int cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
  cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
#else
  // _SC_NPROC_ONLN must be defined...
  cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
  CHECK(cpuCoreCount >= 1);
  ALOGV("Number of CPU cores: %d", cpuCoreCount);
  return cpuCoreCount;
}

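// Creates and initializes the libgav1 decoder instance. The thread count
// defaults to the CPU core count and can be lowered via the debug property
// declared above.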
bool C2SoftGav1Dec::initDecoder() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
  {
    IntfImpl::Lock lock = mIntf->lock();
    mPixelFormatInfo = mIntf->getPixelFormat_l();
  }
  mCodecCtx.reset(new libgav1::Decoder());

  if (mCodecCtx == nullptr) {
    ALOGE("mCodecCtx is null");
    return false;
  }

  libgav1::DecoderSettings settings = {};
  settings.threads = GetCPUCoreCount();
  int32_t numThreads = android::base::GetIntProperty(kNumThreadsProperty, 0);
  if (numThreads > 0 && numThreads < settings.threads) {
    settings.threads = numThreads;
  }

  ALOGV("Using libgav1 AV1 software decoder.");
  Libgav1StatusCode status = mCodecCtx->Init(&settings);
  if (status != kLibgav1StatusOk) {
    ALOGE("av1 decoder failed to initialize. status: %d.", status);
    return false;
  }

  return true;
}

void C2SoftGav1Dec::destroyDecoder() { mCodecCtx = nullptr; }

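// Completes |work| without attaching an output buffer, propagating the EOS
// flag from the input if present.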
void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
  uint32_t flags = 0;
  if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
    flags |= C2FrameData::FLAG_END_OF_STREAM;
    ALOGV("signalling eos");
  }
  work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
  work->worklets.front()->output.buffers.clear();
  work->worklets.front()->output.ordinal = work->input.ordinal;
  work->workletsProcessed = 1u;
}

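// Wraps the filled graphic block in a C2Buffer, attaches the current output
// color aspects, and reports the finished work item back to the framework.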
void C2SoftGav1Dec::finishWork(uint64_t index,
                               const std::unique_ptr<C2Work> &work,
                               const std::shared_ptr<C2GraphicBlock> &block) {
  std::shared_ptr<C2Buffer> buffer =
      createGraphicBuffer(block, C2Rect(mWidth, mHeight));
  {
    IntfImpl::Lock lock = mIntf->lock();
    buffer->setInfo(mIntf->getColorAspects_l());
  }
  auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
    uint32_t flags = 0;
    if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
        (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
      flags |= C2FrameData::FLAG_END_OF_STREAM;
      ALOGV("signalling eos");
    }
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.buffers.push_back(buffer);
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
  };
  if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
    fillWork(work);
  } else {
    finish(index, fillWork);
  }
}

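// Main processing entry point: maps the input buffer, enqueues the access
// unit into libgav1, then tries to dequeue and output a decoded frame.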
void C2SoftGav1Dec::process(const std::unique_ptr<C2Work> &work,
                            const std::shared_ptr<C2BlockPool> &pool) {
  work->result = C2_OK;
  work->workletsProcessed = 0u;
  work->worklets.front()->output.configUpdate.clear();
  work->worklets.front()->output.flags = work->input.flags;
  if (mSignalledError || mSignalledOutputEos) {
    work->result = C2_BAD_VALUE;
    return;
  }

  size_t inOffset = 0u;
  size_t inSize = 0u;
  C2ReadView rView = mDummyReadView;
  if (!work->input.buffers.empty()) {
    rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
    inSize = rView.capacity();
    if (inSize && rView.error()) {
      ALOGE("read view map failed %d", rView.error());
      work->result = C2_CORRUPTED;
      return;
    }
  }

  bool codecConfig =
      ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
  bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);

  ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", inSize,
        (int)work->input.ordinal.timestamp.peeku(),
        (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);

  if (codecConfig) {
    fillEmptyWork(work);
    return;
  }

  int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
  if (inSize) {
    uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);

    mTimeStart = systemTime();
    nsecs_t delay = mTimeStart - mTimeEnd;

    const Libgav1StatusCode status =
        mCodecCtx->EnqueueFrame(bitstream, inSize, frameIndex,
                                /*buffer_private_data=*/nullptr);

    mTimeEnd = systemTime();
    nsecs_t decodeTime = mTimeEnd - mTimeStart;
    ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);

    if (status != kLibgav1StatusOk) {
      ALOGE("av1 decoder failed to decode frame. status: %d.", status);
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      mSignalledError = true;
      return;
    }
  }

  (void)outputBuffer(pool, work);

  if (eos) {
    drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
    mSignalledOutputEos = true;
  } else if (!inSize) {
    fillEmptyWork(work);
  }
}

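// Translates the bitstream's mastering display (MDCV) and content light level
// (CLL) metadata into C2StreamHdrStaticMetadataInfo and publishes it as a
// config update when it changes.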
void C2SoftGav1Dec::getHDRStaticParams(const libgav1::DecoderBuffer *buffer,
                                       const std::unique_ptr<C2Work> &work) {
  C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
  bool infoPresent = false;
  if (buffer->has_hdr_mdcv) {
    // hdr_mdcv.primary_chromaticity_* values are in 0.16 fixed-point format.
    hdrStaticMetadataInfo.mastering.red.x = buffer->hdr_mdcv.primary_chromaticity_x[0] / 65536.0;
    hdrStaticMetadataInfo.mastering.red.y = buffer->hdr_mdcv.primary_chromaticity_y[0] / 65536.0;

    hdrStaticMetadataInfo.mastering.green.x = buffer->hdr_mdcv.primary_chromaticity_x[1] / 65536.0;
    hdrStaticMetadataInfo.mastering.green.y = buffer->hdr_mdcv.primary_chromaticity_y[1] / 65536.0;

    hdrStaticMetadataInfo.mastering.blue.x = buffer->hdr_mdcv.primary_chromaticity_x[2] / 65536.0;
    hdrStaticMetadataInfo.mastering.blue.y = buffer->hdr_mdcv.primary_chromaticity_y[2] / 65536.0;

    // hdr_mdcv.white_point_chromaticity_* values are in 0.16 fixed-point format.
    hdrStaticMetadataInfo.mastering.white.x = buffer->hdr_mdcv.white_point_chromaticity_x / 65536.0;
    hdrStaticMetadataInfo.mastering.white.y = buffer->hdr_mdcv.white_point_chromaticity_y / 65536.0;

    // hdr_mdcv.luminance_max is in 24.8 fixed-point format.
    hdrStaticMetadataInfo.mastering.maxLuminance = buffer->hdr_mdcv.luminance_max / 256.0;
    // hdr_mdcv.luminance_min is in 18.14 format.
    hdrStaticMetadataInfo.mastering.minLuminance = buffer->hdr_mdcv.luminance_min / 16384.0;
    infoPresent = true;
  }

  if (buffer->has_hdr_cll) {
    hdrStaticMetadataInfo.maxCll = buffer->hdr_cll.max_cll;
    hdrStaticMetadataInfo.maxFall = buffer->hdr_cll.max_fall;
    infoPresent = true;
  }
  // config if static info has changed
  if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
    mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
    work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(mHdrStaticMetadataInfo));
  }
}

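// Repackages the ITU-T T.35 payload (carrying HDR10+ dynamic metadata) into a
// C2StreamHdr10PlusInfo config update when it changes.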
void C2SoftGav1Dec::getHDR10PlusInfoData(const libgav1::DecoderBuffer *buffer,
                                         const std::unique_ptr<C2Work> &work) {
  if (buffer->has_itut_t35) {
    std::vector<uint8_t> payload;
    size_t payloadSize = buffer->itut_t35.payload_size;
    if (payloadSize > 0) {
      payload.push_back(buffer->itut_t35.country_code);
      if (buffer->itut_t35.country_code == 0xFF) {
        payload.push_back(buffer->itut_t35.country_code_extension_byte);
      }
      payload.insert(payload.end(), buffer->itut_t35.payload_bytes,
                     buffer->itut_t35.payload_bytes + buffer->itut_t35.payload_size);
    }

    std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
        C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
    if (!hdr10PlusInfo) {
      ALOGE("Hdr10PlusInfo allocation failed");
      mSignalledError = true;
      work->result = C2_NO_MEMORY;
      return;
    }
    memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());

    // config if hdr10Plus info has changed
    if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
      mHdr10PlusInfo = std::move(hdr10PlusInfo);
      work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
    }
  }
}

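// Maps the bitstream color description (primaries, transfer, matrix, range)
// to Codec2 color aspects and configures them on the interface when changed.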
void C2SoftGav1Dec::getVuiParams(const libgav1::DecoderBuffer *buffer) {
  VuiColorAspects vuiColorAspects;
  vuiColorAspects.primaries = buffer->color_primary;
  vuiColorAspects.transfer = buffer->transfer_characteristics;
  vuiColorAspects.coeffs = buffer->matrix_coefficients;
  vuiColorAspects.fullRange = buffer->color_range;

  // convert vui aspects to C2 values if changed
  if (!(vuiColorAspects == mBitstreamColorAspects)) {
    mBitstreamColorAspects = vuiColorAspects;
    ColorAspects sfAspects;
    C2StreamColorAspectsInfo::input codedAspects = { 0u };
    ColorUtils::convertIsoColorAspectsToCodecAspects(
        vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
        vuiColorAspects.fullRange, sfAspects);
    if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
      codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
      codedAspects.range = C2Color::RANGE_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
      codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
      codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
    }
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
  }
}

void C2SoftGav1Dec::setError(const std::unique_ptr<C2Work> &work, c2_status_t error) {
  mSignalledError = true;
  work->result = error;
  work->workletsProcessed = 1u;
}

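// Grows the scratch buffer used for intermediate plane conversions; the
// existing allocation is reused when it is already large enough.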
bool C2SoftGav1Dec::allocTmpFrameBuffer(size_t size) {
  if (size > mTmpFrameBufferSize) {
    mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
    if (mTmpFrameBuffer == nullptr) {
      mTmpFrameBufferSize = 0;
      return false;
    }
    mTmpFrameBufferSize = size;
  }
  return true;
}

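// Dequeues one decoded frame from libgav1, updates the size and pixel-format
// configuration if needed, converts the planes into the fetched graphic
// block, and finishes the corresponding work item. Returns true if a frame
// was output.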
bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
                                 const std::unique_ptr<C2Work> &work) {
  if (!(work && pool)) return false;

  const libgav1::DecoderBuffer *buffer;
  const Libgav1StatusCode status = mCodecCtx->DequeueFrame(&buffer);

  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("av1 decoder DequeueFrame failed. status: %d.", status);
    return false;
  }

  // |buffer| can be NULL if status was equal to kLibgav1StatusOk or
  // kLibgav1StatusNothingToDequeue. This is not an error. This could mean one
  // of two things:
  //  - The EnqueueFrame() call was a flush (called with nullptr).
  //  - The enqueued frame did not have any displayable frames.
  if (!buffer) {
    return false;
  }

  if (buffer->bitdepth > 10) {
    ALOGE("bitdepth %d is not supported", buffer->bitdepth);
    mSignalledError = true;
    work->workletsProcessed = 1u;
    work->result = C2_CORRUPTED;
    return false;
  }

  const int width = buffer->displayed_width[0];
  const int height = buffer->displayed_height[0];
  if (width != mWidth || height != mHeight) {
    mWidth = width;
    mHeight = height;

    C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
    if (err == C2_OK) {
      work->worklets.front()->output.configUpdate.push_back(
          C2Param::Copy(size));
    } else {
      ALOGE("Config update size failed");
      mSignalledError = true;
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      return false;
    }
  }

  getVuiParams(buffer);
  getHDRStaticParams(buffer, work);
  getHDR10PlusInfoData(buffer, work);

#if LIBYUV_VERSION < 1779
  if (buffer->bitdepth == 10 &&
      !(buffer->image_format == libgav1::kImageFormatYuv420 ||
        buffer->image_format == libgav1::kImageFormatMonochrome400)) {
    ALOGE("image_format %d not supported for 10bit", buffer->image_format);
    mSignalledError = true;
    work->workletsProcessed = 1u;
    work->result = C2_CORRUPTED;
    return false;
  }
#endif

  const bool isMonochrome =
      buffer->image_format == libgav1::kImageFormatMonochrome400;

  std::shared_ptr<C2GraphicBlock> block;
  uint32_t format = HAL_PIXEL_FORMAT_YV12;
  std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
  if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
    IntfImpl::Lock lock = mIntf->lock();
    codedColorAspects = mIntf->getColorAspects_l();
    bool allowRGBA1010102 = false;
    if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
        codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
        codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
      allowRGBA1010102 = true;
    }
    format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
#if !HAVE_LIBYUV_I410_I210_TO_AB30
    if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
        (buffer->image_format != libgav1::kImageFormatYuv420)) {
      ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
      mSignalledError = true;
      work->result = C2_OMITTED;
      work->workletsProcessed = 1u;
      return false;
    }
#endif
  }

  if (mHalPixelFormat != format) {
    C2StreamPixelFormatInfo::output pixelFormat(0u, format);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
    if (err == C2_OK) {
      work->worklets.front()->output.configUpdate.push_back(
          C2Param::Copy(pixelFormat));
    } else {
      ALOGE("Config update pixelFormat failed");
      mSignalledError = true;
      work->workletsProcessed = 1u;
      work->result = C2_CORRUPTED;
      return false;
    }
    mHalPixelFormat = format;
  }

  C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};

  // We always create a graphic block that is width aligned to 16 and height
  // aligned to 2. We set the correct "crop" value of the image in the call to
  // createGraphicBuffer() by setting the correct image dimensions.
  c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16),
                                            align(mHeight, 2), format, usage,
                                            &block);

  if (err != C2_OK) {
    ALOGE("fetchGraphicBlock for Output failed with status %d", err);
    work->result = err;
    return false;
  }

  C2GraphicView wView = block->map().get();

  if (wView.error()) {
    ALOGE("graphic view map failed %d", wView.error());
    work->result = C2_CORRUPTED;
    return false;
  }

  ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
        block->height(), mWidth, mHeight, (int)buffer->user_private_data);

  uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
  uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
  uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);

  C2PlanarLayout layout = wView.layout();
  size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
  size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
  size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;

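  // 10-bit frames are converted to RGBA_1010102, P010 or 8-bit YV12 depending
  // on the negotiated output format; 8-bit frames are converted to YV12.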
  if (buffer->bitdepth == 10) {
    const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
    const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
    const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
    size_t srcYStride = buffer->stride[0] / 2;
    size_t srcUStride = buffer->stride[1] / 2;
    size_t srcVStride = buffer->stride[2] / 2;

    if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
      bool processed = false;
#if HAVE_LIBYUV_I410_I210_TO_AB30
      if (buffer->image_format == libgav1::kImageFormatYuv444) {
        libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                                 dstY, dstYStride, &libyuv::kYuvV2020Constants,
                                 mWidth, mHeight);
        processed = true;
      } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
        libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                                 dstY, dstYStride, &libyuv::kYuvV2020Constants,
                                 mWidth, mHeight);
        processed = true;
      }
#endif  // HAVE_LIBYUV_I410_I210_TO_AB30
      if (!processed) {
        if (isMonochrome) {
          const size_t tmpSize = mWidth;
          const bool needFill = tmpSize > mTmpFrameBufferSize;
          if (!allocTmpFrameBuffer(tmpSize)) {
            ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
            setError(work, C2_NO_MEMORY);
            return false;
          }
          srcU = srcV = mTmpFrameBuffer.get();
          srcUStride = srcVStride = 0;
          if (needFill) {
            std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
          }
        }
        convertYUV420Planar16ToY410OrRGBA1010102(
            (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
            srcUStride, srcVStride,
            dstYStride / sizeof(uint32_t), mWidth, mHeight,
            std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
      }
    } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
      dstYStride /= 2;
      dstUStride /= 2;
      dstVStride /= 2;
#if LIBYUV_VERSION >= 1779
      if (buffer->image_format == libgav1::kImageFormatYuv444 ||
          buffer->image_format == libgav1::kImageFormatYuv422) {
        // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010 and
        // libyuv::I210ToP010 when they are available.
        // Note it may be safe to alias dstY in I010ToP010, but the libyuv API doesn't make any
        // guarantees.
        const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
        if (!allocTmpFrameBuffer(tmpSize)) {
          ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
          setError(work, C2_NO_MEMORY);
          return false;
        }
        uint16_t *const tmpY = mTmpFrameBuffer.get();
        uint16_t *const tmpU = tmpY + dstYStride * mHeight;
        uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
        if (buffer->image_format == libgav1::kImageFormatYuv444) {
          libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                             tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                             mWidth, mHeight);
        } else {
          libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                             tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                             mWidth, mHeight);
        }
        libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
                           (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride,
                           mWidth, mHeight);
      } else {
        convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                    srcYStride, srcUStride, srcVStride, dstYStride,
                                    dstUStride, mWidth, mHeight, isMonochrome);
      }
#else  // LIBYUV_VERSION < 1779
      convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                  srcYStride, srcUStride, srcVStride, dstYStride,
                                  dstUStride, mWidth, mHeight, isMonochrome);
#endif  // LIBYUV_VERSION >= 1779
    } else {
#if LIBYUV_VERSION >= 1779
      if (buffer->image_format == libgav1::kImageFormatYuv444) {
        // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420 when
        // it's available.
        const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
        if (!allocTmpFrameBuffer(tmpSize)) {
          ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
          setError(work, C2_NO_MEMORY);
          return false;
        }
        uint16_t *const tmpY = mTmpFrameBuffer.get();
        uint16_t *const tmpU = tmpY + dstYStride * mHeight;
        uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
        libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                           tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
                           mWidth, mHeight);
        libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                           dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                           mWidth, mHeight);
      } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
        libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                           dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                           mWidth, mHeight);
      } else {
        convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
                                    srcUStride, srcVStride, dstYStride, dstUStride,
                                    mWidth, mHeight, isMonochrome);
      }
#else  // LIBYUV_VERSION < 1779
      convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
                                  srcUStride, srcVStride, dstYStride, dstUStride,
                                  mWidth, mHeight, isMonochrome);
#endif  // LIBYUV_VERSION >= 1779
    }
  } else {
    const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
    const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
    const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
    size_t srcYStride = buffer->stride[0];
    size_t srcUStride = buffer->stride[1];
    size_t srcVStride = buffer->stride[2];

    if (buffer->image_format == libgav1::kImageFormatYuv444) {
      libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                         dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                         mWidth, mHeight);
    } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
      libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                         dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                         mWidth, mHeight);
    } else {
      convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
                                 srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
                                 isMonochrome);
    }
  }
  finishWork(buffer->user_private_data, work, std::move(block));
  block = nullptr;
  return true;
}

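// Signals EOS to libgav1 and drains every remaining displayable frame.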
c2_status_t C2SoftGav1Dec::drainInternal(
    uint32_t drainMode, const std::shared_ptr<C2BlockPool> &pool,
    const std::unique_ptr<C2Work> &work) {
  if (drainMode == NO_DRAIN) {
    ALOGW("drain with NO_DRAIN: no-op");
    return C2_OK;
  }
  if (drainMode == DRAIN_CHAIN) {
    ALOGW("DRAIN_CHAIN not supported");
    return C2_OMITTED;
  }

  const Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

  while (outputBuffer(pool, work)) {
  }

  if (drainMode == DRAIN_COMPONENT_WITH_EOS && work &&
      work->workletsProcessed == 0u) {
    fillEmptyWork(work);
  }

  return C2_OK;
}

c2_status_t C2SoftGav1Dec::drain(uint32_t drainMode,
                                 const std::shared_ptr<C2BlockPool> &pool) {
  return drainInternal(drainMode, pool, nullptr);
}

class C2SoftGav1Factory : public C2ComponentFactory {
 public:
  C2SoftGav1Factory()
      : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
            GetCodec2PlatformComponentStore()->getParamReflector())) {}

  virtual c2_status_t createComponent(
      c2_node_id_t id, std::shared_ptr<C2Component> *const component,
      std::function<void(C2Component *)> deleter) override {
    *component = std::shared_ptr<C2Component>(
        new C2SoftGav1Dec(COMPONENT_NAME, id,
                          std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
        deleter);
    return C2_OK;
  }

  virtual c2_status_t createInterface(
      c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *const interface,
      std::function<void(C2ComponentInterface *)> deleter) override {
    *interface = std::shared_ptr<C2ComponentInterface>(
        new SimpleInterface<C2SoftGav1Dec::IntfImpl>(
            COMPONENT_NAME, id,
            std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
        deleter);
    return C2_OK;
  }

  virtual ~C2SoftGav1Factory() override = default;

 private:
  std::shared_ptr<C2ReflectorHelper> mHelper;
};

}  // namespace android

__attribute__((cfi_canonical_jump_table))
extern "C" ::C2ComponentFactory *CreateCodec2Factory() {
  ALOGV("in %s", __func__);
  return new ::android::C2SoftGav1Factory();
}

__attribute__((cfi_canonical_jump_table))
extern "C" void DestroyCodec2Factory(::C2ComponentFactory *factory) {
  ALOGV("in %s", __func__);
  delete factory;
}