blob: f056759c0c7f37e46cb762dc3deb525ef7c432a9 [file] [log] [blame]
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -07001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "C2SoftGav1Dec"
19#include "C2SoftGav1Dec.h"
20
Harish Mahendrakar48003b52023-11-02 01:22:49 +000021#include <android-base/properties.h>
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -070022#include <C2Debug.h>
23#include <C2PlatformSupport.h>
Harish Mahendrakarf0fa7a22021-12-10 20:36:32 -080024#include <Codec2BufferUtils.h>
Harish Mahendrakarf5dec502022-04-13 15:53:55 -070025#include <Codec2CommonUtils.h>
Neelkamal Semwalc13a76a2021-09-01 17:07:30 +053026#include <Codec2Mapper.h>
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -070027#include <SimpleC2Interface.h>
Vignesh Venkatasubramanian406ed312022-04-21 10:32:55 -070028#include <libyuv.h>
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -070029#include <log/log.h>
30#include <media/stagefright/foundation/AUtils.h>
31#include <media/stagefright/foundation/MediaDefs.h>
32
James Zern11b60d22023-03-02 19:21:44 -080033// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
34#if LIBYUV_VERSION >= 1780
35#include <algorithm>
36#define HAVE_LIBYUV_I410_I210_TO_AB30 1
37#else
38#define HAVE_LIBYUV_I410_I210_TO_AB30 0
39#endif
40
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -070041namespace android {
42
// Property used to control the number of threads used in the gav1 decoder.
constexpr char kNumThreadsProperty[] = "debug.c2.gav1.numthreads";

// codecname set and passed in as a compile flag from Android.bp
constexpr char COMPONENT_NAME[] = CODECNAME;

// Floor for the advertised max input buffer size (2 MiB); see MaxInputSizeSetter.
constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
50
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -070051class C2SoftGav1Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
52 public:
53 explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
54 : SimpleInterface<void>::BaseParams(
55 helper, COMPONENT_NAME, C2Component::KIND_DECODER,
56 C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
57 noPrivateBuffers(); // TODO: account for our buffers here.
58 noInputReferences();
59 noOutputReferences();
60 noInputLatency();
61 noTimeStretch();
62
63 addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
64 .withConstValue(new C2ComponentAttributesSetting(
65 C2Component::ATTRIB_IS_TEMPORAL))
66 .build());
67
68 addParameter(
69 DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
70 .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
71 .withFields({
Vignesh Venkatasubramanianc4d385f2022-02-22 10:49:46 -080072 C2F(mSize, width).inRange(2, 4096),
73 C2F(mSize, height).inRange(2, 4096),
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -070074 })
75 .withSetter(SizeSetter)
76 .build());
77
78 addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
79 .withDefault(new C2StreamProfileLevelInfo::input(
80 0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
81 .withFields({C2F(mProfileLevel, profile)
82 .oneOf({C2Config::PROFILE_AV1_0,
83 C2Config::PROFILE_AV1_1}),
84 C2F(mProfileLevel, level)
85 .oneOf({
Harish Mahendrakar1ad8c3b2021-06-04 15:42:31 -070086 C2Config::LEVEL_AV1_2, C2Config::LEVEL_AV1_2_1,
87 C2Config::LEVEL_AV1_2_2, C2Config::LEVEL_AV1_2_3,
88 C2Config::LEVEL_AV1_3, C2Config::LEVEL_AV1_3_1,
89 C2Config::LEVEL_AV1_3_2, C2Config::LEVEL_AV1_3_3,
90 C2Config::LEVEL_AV1_4, C2Config::LEVEL_AV1_4_1,
91 C2Config::LEVEL_AV1_4_2, C2Config::LEVEL_AV1_4_3,
92 C2Config::LEVEL_AV1_5, C2Config::LEVEL_AV1_5_1,
93 C2Config::LEVEL_AV1_5_2, C2Config::LEVEL_AV1_5_3,
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -070094 })})
95 .withSetter(ProfileLevelSetter, mSize)
96 .build());
97
98 mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
99 addParameter(
100 DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
101 .withDefault(mHdr10PlusInfoInput)
102 .withFields({
103 C2F(mHdr10PlusInfoInput, m.value).any(),
104 })
105 .withSetter(Hdr10PlusInfoInputSetter)
106 .build());
107
108 mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
109 addParameter(
110 DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
111 .withDefault(mHdr10PlusInfoOutput)
112 .withFields({
113 C2F(mHdr10PlusInfoOutput, m.value).any(),
114 })
115 .withSetter(Hdr10PlusInfoOutputSetter)
116 .build());
117
Manisha Jajoo210d52e2022-05-19 17:11:55 +0530118 // default static info
119 C2HdrStaticMetadataStruct defaultStaticInfo{};
120 helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
121 addParameter(
122 DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
123 .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
124 .withFields({
125 C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
126 C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
127 C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
128 C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
129 C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
130 C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
131 C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
132 C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
133 C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
134 C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
135 C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
136 C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)
137 })
138 .withSetter(HdrStaticInfoSetter)
139 .build());
140
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -0700141 addParameter(
142 DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
143 .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
144 .withFields({
145 C2F(mSize, width).inRange(2, 2048, 2),
146 C2F(mSize, height).inRange(2, 2048, 2),
147 })
148 .withSetter(MaxPictureSizeSetter, mSize)
149 .build());
150
151 addParameter(DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
Ray Essick1af2cc52022-01-25 15:59:23 -0800152 .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -0700153 .withFields({
154 C2F(mMaxInputSize, value).any(),
155 })
156 .calculatedAs(MaxInputSizeSetter, mMaxSize)
157 .build());
158
159 C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
160 std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
161 C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */,
162 C2Color::YUV_420);
163 memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
164
165 defaultColorInfo = C2StreamColorInfo::output::AllocShared(
166 {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */,
167 C2Color::YUV_420);
168 helper->addStructDescriptors<C2ChromaOffsetStruct>();
169
170 addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
171 .withConstValue(defaultColorInfo)
172 .build());
173
174 addParameter(
175 DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
176 .withDefault(new C2StreamColorAspectsTuning::output(
177 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
178 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
179 .withFields(
180 {C2F(mDefaultColorAspects, range)
181 .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
182 C2F(mDefaultColorAspects, primaries)
183 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
184 C2Color::PRIMARIES_OTHER),
185 C2F(mDefaultColorAspects, transfer)
186 .inRange(C2Color::TRANSFER_UNSPECIFIED,
187 C2Color::TRANSFER_OTHER),
188 C2F(mDefaultColorAspects, matrix)
189 .inRange(C2Color::MATRIX_UNSPECIFIED,
190 C2Color::MATRIX_OTHER)})
191 .withSetter(DefaultColorAspectsSetter)
192 .build());
193
Neelkamal Semwalc13a76a2021-09-01 17:07:30 +0530194 addParameter(
195 DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
196 .withDefault(new C2StreamColorAspectsInfo::input(
197 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
198 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
199 .withFields({
200 C2F(mCodedColorAspects, range).inRange(
201 C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
202 C2F(mCodedColorAspects, primaries).inRange(
203 C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
204 C2F(mCodedColorAspects, transfer).inRange(
205 C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
206 C2F(mCodedColorAspects, matrix).inRange(
207 C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
208 })
209 .withSetter(CodedColorAspectsSetter)
210 .build());
211
212 addParameter(
213 DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
214 .withDefault(new C2StreamColorAspectsInfo::output(
215 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
216 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
217 .withFields({
218 C2F(mColorAspects, range).inRange(
219 C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
220 C2F(mColorAspects, primaries).inRange(
221 C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
222 C2F(mColorAspects, transfer).inRange(
223 C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
224 C2F(mColorAspects, matrix).inRange(
225 C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
226 })
227 .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
228 .build());
229
Harish Mahendrakard4bbb762022-03-29 11:53:23 -0700230 std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
Harish Mahendrakarf5dec502022-04-13 15:53:55 -0700231 if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
Harish Mahendrakard4bbb762022-03-29 11:53:23 -0700232 pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
233 }
Harish Mahendrakar10c0c5d2022-05-30 20:35:16 -0700234 // If color format surface isn't added to supported formats, there is no way to know
235 // when the color-format is configured to surface. This is necessary to be able to
236 // choose 10-bit format while decoding 10-bit clips in surface mode.
237 pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
238
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -0700239 // TODO: support more formats?
Harish Mahendrakard4bbb762022-03-29 11:53:23 -0700240 addParameter(
241 DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
242 .withDefault(new C2StreamPixelFormatInfo::output(
243 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
244 .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
245 .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
246 .build());
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -0700247 }
248
249 static C2R SizeSetter(bool mayBlock,
250 const C2P<C2StreamPictureSizeInfo::output> &oldMe,
251 C2P<C2StreamPictureSizeInfo::output> &me) {
252 (void)mayBlock;
253 C2R res = C2R::Ok();
254 if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
255 res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
256 me.set().width = oldMe.v.width;
257 }
258 if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
259 res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
260 me.set().height = oldMe.v.height;
261 }
262 return res;
263 }
264
265 static C2R MaxPictureSizeSetter(
266 bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
267 const C2P<C2StreamPictureSizeInfo::output> &size) {
268 (void)mayBlock;
269 // TODO: get max width/height from the size's field helpers vs.
270 // hardcoding
271 me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
272 me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
273 return C2R::Ok();
274 }
275
276 static C2R MaxInputSizeSetter(
277 bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
278 const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
279 (void)mayBlock;
Ray Essick1af2cc52022-01-25 15:59:23 -0800280 // assume compression ratio of 2, but enforce a floor
281 me.set().value = c2_max((((maxSize.v.width + 63) / 64)
282 * ((maxSize.v.height + 63) / 64) * 3072), kMinInputBufferSize);
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -0700283 return C2R::Ok();
284 }
285
286 static C2R DefaultColorAspectsSetter(
287 bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
288 (void)mayBlock;
289 if (me.v.range > C2Color::RANGE_OTHER) {
290 me.set().range = C2Color::RANGE_OTHER;
291 }
292 if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
293 me.set().primaries = C2Color::PRIMARIES_OTHER;
294 }
295 if (me.v.transfer > C2Color::TRANSFER_OTHER) {
296 me.set().transfer = C2Color::TRANSFER_OTHER;
297 }
298 if (me.v.matrix > C2Color::MATRIX_OTHER) {
299 me.set().matrix = C2Color::MATRIX_OTHER;
300 }
301 return C2R::Ok();
302 }
303
Neelkamal Semwalc13a76a2021-09-01 17:07:30 +0530304 static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
305 (void)mayBlock;
306 if (me.v.range > C2Color::RANGE_OTHER) {
307 me.set().range = C2Color::RANGE_OTHER;
308 }
309 if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
310 me.set().primaries = C2Color::PRIMARIES_OTHER;
311 }
312 if (me.v.transfer > C2Color::TRANSFER_OTHER) {
313 me.set().transfer = C2Color::TRANSFER_OTHER;
314 }
315 if (me.v.matrix > C2Color::MATRIX_OTHER) {
316 me.set().matrix = C2Color::MATRIX_OTHER;
317 }
318 return C2R::Ok();
319 }
320
321 static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
322 const C2P<C2StreamColorAspectsTuning::output> &def,
323 const C2P<C2StreamColorAspectsInfo::input> &coded) {
324 (void)mayBlock;
325 // take default values for all unspecified fields, and coded values for specified ones
326 me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
327 me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
328 ? def.v.primaries : coded.v.primaries;
329 me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
330 ? def.v.transfer : coded.v.transfer;
331 me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
332 return C2R::Ok();
333 }
334
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -0700335 static C2R ProfileLevelSetter(
336 bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
337 const C2P<C2StreamPictureSizeInfo::output> &size) {
338 (void)mayBlock;
339 (void)size;
340 (void)me; // TODO: validate
341 return C2R::Ok();
342 }
343
344 std::shared_ptr<C2StreamColorAspectsTuning::output>
345 getDefaultColorAspects_l() {
346 return mDefaultColorAspects;
347 }
348
Neelkamal Semwalc13a76a2021-09-01 17:07:30 +0530349 std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
350 return mColorAspects;
351 }
352
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -0700353 static C2R Hdr10PlusInfoInputSetter(bool mayBlock,
354 C2P<C2StreamHdr10PlusInfo::input> &me) {
355 (void)mayBlock;
356 (void)me; // TODO: validate
357 return C2R::Ok();
358 }
359
360 static C2R Hdr10PlusInfoOutputSetter(bool mayBlock,
361 C2P<C2StreamHdr10PlusInfo::output> &me) {
362 (void)mayBlock;
363 (void)me; // TODO: validate
364 return C2R::Ok();
365 }
366
Harish Mahendrakar10c0c5d2022-05-30 20:35:16 -0700367 // unsafe getters
368 std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const { return mPixelFormat; }
369
Manisha Jajoo210d52e2022-05-19 17:11:55 +0530370 static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output> &me) {
371 (void)mayBlock;
372 if (me.v.mastering.red.x > 1) {
373 me.set().mastering.red.x = 1;
374 }
375 if (me.v.mastering.red.y > 1) {
376 me.set().mastering.red.y = 1;
377 }
378 if (me.v.mastering.green.x > 1) {
379 me.set().mastering.green.x = 1;
380 }
381 if (me.v.mastering.green.y > 1) {
382 me.set().mastering.green.y = 1;
383 }
384 if (me.v.mastering.blue.x > 1) {
385 me.set().mastering.blue.x = 1;
386 }
387 if (me.v.mastering.blue.y > 1) {
388 me.set().mastering.blue.y = 1;
389 }
390 if (me.v.mastering.white.x > 1) {
391 me.set().mastering.white.x = 1;
392 }
393 if (me.v.mastering.white.y > 1) {
394 me.set().mastering.white.y = 1;
395 }
396 if (me.v.mastering.maxLuminance > 65535.0) {
397 me.set().mastering.maxLuminance = 65535.0;
398 }
399 if (me.v.mastering.minLuminance > 6.5535) {
400 me.set().mastering.minLuminance = 6.5535;
401 }
402 if (me.v.maxCll > 65535.0) {
403 me.set().maxCll = 65535.0;
404 }
405 if (me.v.maxFall > 65535.0) {
406 me.set().maxFall = 65535.0;
407 }
408 return C2R::Ok();
409 }
410
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -0700411 private:
412 std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
413 std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
414 std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
415 std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
416 std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
417 std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
418 std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
Neelkamal Semwalc13a76a2021-09-01 17:07:30 +0530419 std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
420 std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -0700421 std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
422 std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
Manisha Jajoo210d52e2022-05-19 17:11:55 +0530423 std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -0700424};
425
// Constructor: wires the interface implementation into SimpleC2Component and
// seeds the enqueue-timing clocks; the libgav1 context is created lazily in
// initDecoder().
C2SoftGav1Dec::C2SoftGav1Dec(const char *name, c2_node_id_t id,
                             const std::shared_ptr<IntfImpl> &intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mCodecCtx(nullptr) {
  // Start both timestamps at "now" so the first computed delay is ~0.
  mTimeStart = mTimeEnd = systemTime();
}
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -0700434
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700435C2SoftGav1Dec::~C2SoftGav1Dec() { onRelease(); }
436
// Component init hook: creates and initializes the libgav1 decoder.
c2_status_t C2SoftGav1Dec::onInit() {
  return initDecoder() ? C2_OK : C2_CORRUPTED;
}
440
441c2_status_t C2SoftGav1Dec::onStop() {
442 mSignalledError = false;
443 mSignalledOutputEos = false;
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -0700444 return C2_OK;
445}
446
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700447void C2SoftGav1Dec::onReset() {
448 (void)onStop();
449 c2_status_t err = onFlush_sm();
450 if (err != C2_OK) {
451 ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
452 destroyDecoder();
453 if (!initDecoder()) {
454 ALOGE("Hard reset failed.");
455 }
456 }
457}
458
459void C2SoftGav1Dec::onRelease() { destroyDecoder(); }
460
// Flush hook: signals EOS to libgav1 and drains any pending output frame,
// then clears the sticky error/EOS state. Call order matters: SignalEOS
// must precede the DequeueFrame that releases held buffers.
c2_status_t C2SoftGav1Dec::onFlush_sm() {
  Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

  // Dequeue frame (if any) that was enqueued previously.
  const libgav1::DecoderBuffer *buffer;
  status = mCodecCtx->DequeueFrame(&buffer);
  // kLibgav1StatusNothingToDequeue simply means no frame was pending.
  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("Failed to dequeue frame after flushing the av1 decoder. status: %d",
          status);
    return C2_CORRUPTED;
  }

  mSignalledError = false;
  mSignalledOutputEos = false;

  return C2_OK;
}
482
483static int GetCPUCoreCount() {
484 int cpuCoreCount = 1;
485#if defined(_SC_NPROCESSORS_ONLN)
486 cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
487#else
488 // _SC_NPROC_ONLN must be defined...
489 cpuCoreCount = sysconf(_SC_NPROC_ONLN);
490#endif
491 CHECK(cpuCoreCount >= 1);
492 ALOGV("Number of CPU cores: %d", cpuCoreCount);
493 return cpuCoreCount;
494}
495
// Creates and configures a fresh libgav1 decoder instance.
// Returns false if allocation or libgav1 initialization fails.
bool C2SoftGav1Dec::initDecoder() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
  {
    // Snapshot the configured output pixel format under the interface lock.
    IntfImpl::Lock lock = mIntf->lock();
    mPixelFormatInfo = mIntf->getPixelFormat_l();
  }
  mCodecCtx.reset(new libgav1::Decoder());

  if (mCodecCtx == nullptr) {
    ALOGE("mCodecCtx is null");
    return false;
  }

  libgav1::DecoderSettings settings = {};
  settings.threads = GetCPUCoreCount();
  // The debug property can only lower the thread count, never raise it.
  int32_t numThreads = android::base::GetIntProperty(kNumThreadsProperty, 0);
  if (numThreads > 0 && numThreads < settings.threads) {
    settings.threads = numThreads;
  }

  ALOGV("Using libgav1 AV1 software decoder.");
  Libgav1StatusCode status = mCodecCtx->Init(&settings);
  if (status != kLibgav1StatusOk) {
    ALOGE("av1 decoder failed to initialize. status: %d.", status);
    return false;
  }

  return true;
}
527
528void C2SoftGav1Dec::destroyDecoder() { mCodecCtx = nullptr; }
529
530void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
531 uint32_t flags = 0;
532 if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
533 flags |= C2FrameData::FLAG_END_OF_STREAM;
534 ALOGV("signalling eos");
535 }
536 work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
537 work->worklets.front()->output.buffers.clear();
538 work->worklets.front()->output.ordinal = work->input.ordinal;
539 work->workletsProcessed = 1u;
540}
541
// Attaches the decoded picture in |block| (cropped to mWidth x mHeight, with
// current color aspects) to the work item identified by |index|. If |work|
// is the matching in-flight item it is filled directly; otherwise the frame
// is delivered through SimpleC2Component::finish() for an earlier work item.
void C2SoftGav1Dec::finishWork(uint64_t index,
                               const std::unique_ptr<C2Work> &work,
                               const std::shared_ptr<C2GraphicBlock> &block) {
  std::shared_ptr<C2Buffer> buffer =
      createGraphicBuffer(block, C2Rect(mWidth, mHeight));
  {
    // Read the negotiated color aspects under the interface lock.
    IntfImpl::Lock lock = mIntf->lock();
    buffer->setInfo(mIntf->getColorAspects_l());
  }
  // Captures |buffer| by value so it stays alive until the work is filled.
  auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
    uint32_t flags = 0;
    // Only mark EOS on the work item that actually carries the EOS input.
    if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
        (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
      flags |= C2FrameData::FLAG_END_OF_STREAM;
      ALOGV("signalling eos");
    }
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.buffers.push_back(buffer);
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
  };
  if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
    fillWork(work);
  } else {
    finish(index, fillWork);
  }
}
570
// Main work loop: maps the input buffer, enqueues the access unit into
// libgav1, then attempts to dequeue/output one decoded frame. Codec-config
// buffers are consumed without decoding; EOS triggers a drain.
void C2SoftGav1Dec::process(const std::unique_ptr<C2Work> &work,
                            const std::shared_ptr<C2BlockPool> &pool) {
  work->result = C2_OK;
  work->workletsProcessed = 0u;
  work->worklets.front()->output.configUpdate.clear();
  work->worklets.front()->output.flags = work->input.flags;
  // After a fatal error or EOS the component rejects further work.
  if (mSignalledError || mSignalledOutputEos) {
    work->result = C2_BAD_VALUE;
    return;
  }

  size_t inOffset = 0u;
  size_t inSize = 0u;
  C2ReadView rView = mDummyReadView;
  if (!work->input.buffers.empty()) {
    rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
    inSize = rView.capacity();
    if (inSize && rView.error()) {
      ALOGE("read view map failed %d", rView.error());
      work->result = C2_CORRUPTED;
      return;
    }
  }

  bool codecConfig =
      ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
  bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);

  ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", inSize,
        (int)work->input.ordinal.timestamp.peeku(),
        (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);

  // Codec-config data is not passed to the decoder; just complete the work.
  if (codecConfig) {
    fillEmptyWork(work);
    return;
  }

  // The frame index doubles as libgav1's user_private_data so the decoded
  // frame can be matched back to its work item.
  int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
  if (inSize) {
    uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);

    mTimeStart = systemTime();
    nsecs_t delay = mTimeStart - mTimeEnd;

    const Libgav1StatusCode status =
        mCodecCtx->EnqueueFrame(bitstream, inSize, frameIndex,
                                /*buffer_private_data=*/nullptr);

    mTimeEnd = systemTime();
    nsecs_t decodeTime = mTimeEnd - mTimeStart;
    ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);

    if (status != kLibgav1StatusOk) {
      ALOGE("av1 decoder failed to decode frame. status: %d.", status);
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      mSignalledError = true;
      return;
    }

  }

  // Best-effort: a frame may or may not be available to dequeue yet.
  (void)outputBuffer(pool, work);

  if (eos) {
    drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
    mSignalledOutputEos = true;
  } else if (!inSize) {
    // Empty non-EOS input: nothing to decode, complete the work as-is.
    fillEmptyWork(work);
  }
}
642
// Extracts HDR static metadata (mastering display color volume and content
// light level) from the decoded buffer, converting libgav1's fixed-point
// values to floats, and pushes a config update when the metadata changes.
void C2SoftGav1Dec::getHDRStaticParams(const libgav1::DecoderBuffer *buffer,
                                       const std::unique_ptr<C2Work> &work) {
  C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
  bool infoPresent = false;
  if (buffer->has_hdr_mdcv) {
    // hdr_mdcv.primary_chromaticity_* values are in 0.16 fixed-point format.
    hdrStaticMetadataInfo.mastering.red.x = buffer->hdr_mdcv.primary_chromaticity_x[0] / 65536.0;
    hdrStaticMetadataInfo.mastering.red.y = buffer->hdr_mdcv.primary_chromaticity_y[0] / 65536.0;

    hdrStaticMetadataInfo.mastering.green.x = buffer->hdr_mdcv.primary_chromaticity_x[1] / 65536.0;
    hdrStaticMetadataInfo.mastering.green.y = buffer->hdr_mdcv.primary_chromaticity_y[1] / 65536.0;

    hdrStaticMetadataInfo.mastering.blue.x = buffer->hdr_mdcv.primary_chromaticity_x[2] / 65536.0;
    hdrStaticMetadataInfo.mastering.blue.y = buffer->hdr_mdcv.primary_chromaticity_y[2] / 65536.0;

    // hdr_mdcv.white_point_chromaticity_* values are in 0.16 fixed-point format.
    hdrStaticMetadataInfo.mastering.white.x = buffer->hdr_mdcv.white_point_chromaticity_x / 65536.0;
    hdrStaticMetadataInfo.mastering.white.y = buffer->hdr_mdcv.white_point_chromaticity_y / 65536.0;

    // hdr_mdcv.luminance_max is in 24.8 fixed-point format.
    hdrStaticMetadataInfo.mastering.maxLuminance = buffer->hdr_mdcv.luminance_max / 256.0;
    // hdr_mdcv.luminance_min is in 18.14 format.
    hdrStaticMetadataInfo.mastering.minLuminance = buffer->hdr_mdcv.luminance_min / 16384.0;
    infoPresent = true;
  }

  if (buffer->has_hdr_cll) {
    hdrStaticMetadataInfo.maxCll = buffer->hdr_cll.max_cll;
    hdrStaticMetadataInfo.maxFall = buffer->hdr_cll.max_fall;
    infoPresent = true;
  }
  // config if static info has changed
  if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
    mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
    work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(mHdrStaticMetadataInfo));
  }
}
680
// Repackages the ITU-T T.35 payload carried in the decoded buffer (HDR10+
// dynamic metadata) into a C2StreamHdr10PlusInfo config update, emitted only
// when the payload differs from the previously reported one.
void C2SoftGav1Dec::getHDR10PlusInfoData(const libgav1::DecoderBuffer *buffer,
                                         const std::unique_ptr<C2Work> &work) {
  if (buffer->has_itut_t35) {
    std::vector<uint8_t> payload;
    size_t payloadSize = buffer->itut_t35.payload_size;
    if (payloadSize > 0) {
      // The T.35 header (country code, plus extension byte when the code is
      // 0xFF) is prepended to the raw payload bytes.
      payload.push_back(buffer->itut_t35.country_code);
      if (buffer->itut_t35.country_code == 0xFF) {
        payload.push_back(buffer->itut_t35.country_code_extension_byte);
      }
      payload.insert(payload.end(), buffer->itut_t35.payload_bytes,
                     buffer->itut_t35.payload_bytes + buffer->itut_t35.payload_size);
    }

    std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
        C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
    if (!hdr10PlusInfo) {
      ALOGE("Hdr10PlusInfo allocation failed");
      mSignalledError = true;
      work->result = C2_NO_MEMORY;
      return;
    }
    memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());

    // config if hdr10Plus info has changed
    if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
      mHdr10PlusInfo = std::move(hdr10PlusInfo);
      work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
    }
  }
}
712
// Reads the bitstream color description (primaries/transfer/matrix/range)
// from the decoded buffer; when it changes, maps it to Codec2 color aspects
// and pushes the new coded aspects into the interface config.
void C2SoftGav1Dec::getVuiParams(const libgav1::DecoderBuffer *buffer) {
  VuiColorAspects vuiColorAspects;
  vuiColorAspects.primaries = buffer->color_primary;
  vuiColorAspects.transfer = buffer->transfer_characteristics;
  vuiColorAspects.coeffs = buffer->matrix_coefficients;
  vuiColorAspects.fullRange = buffer->color_range;

  // convert vui aspects to C2 values if changed
  if (!(vuiColorAspects == mBitstreamColorAspects)) {
    mBitstreamColorAspects = vuiColorAspects;
    ColorAspects sfAspects;
    C2StreamColorAspectsInfo::input codedAspects = { 0u };
    // ISO/IEC 23001-8 codes -> stagefright ColorAspects.
    ColorUtils::convertIsoColorAspectsToCodecAspects(
        vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
        vuiColorAspects.fullRange, sfAspects);
    // Any unmappable field degrades to UNSPECIFIED rather than failing.
    if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
      codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
      codedAspects.range = C2Color::RANGE_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
      codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
      codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
    }
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
  }
}
744
James Zern0958b8b2023-02-17 21:48:08 -0800745void C2SoftGav1Dec::setError(const std::unique_ptr<C2Work> &work, c2_status_t error) {
746 mSignalledError = true;
747 work->result = error;
748 work->workletsProcessed = 1u;
749}
750
751bool C2SoftGav1Dec::allocTmpFrameBuffer(size_t size) {
752 if (size > mTmpFrameBufferSize) {
753 mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
754 if (mTmpFrameBuffer == nullptr) {
755 mTmpFrameBufferSize = 0;
756 return false;
757 }
758 mTmpFrameBufferSize = size;
759 }
760 return true;
761}
762
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700763bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
764 const std::unique_ptr<C2Work> &work) {
765 if (!(work && pool)) return false;
766
767 const libgav1::DecoderBuffer *buffer;
768 const Libgav1StatusCode status = mCodecCtx->DequeueFrame(&buffer);
769
James Zernb7aee6e2020-06-26 13:49:53 -0700770 if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700771 ALOGE("av1 decoder DequeueFrame failed. status: %d.", status);
772 return false;
773 }
774
James Zernb7aee6e2020-06-26 13:49:53 -0700775 // |buffer| can be NULL if status was equal to kLibgav1StatusOk or
776 // kLibgav1StatusNothingToDequeue. This is not an error. This could mean one
777 // of two things:
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700778 // - The EnqueueFrame() call was either a flush (called with nullptr).
779 // - The enqueued frame did not have any displayable frames.
780 if (!buffer) {
781 return false;
782 }
783
784 const int width = buffer->displayed_width[0];
785 const int height = buffer->displayed_height[0];
786 if (width != mWidth || height != mHeight) {
787 mWidth = width;
788 mHeight = height;
789
790 C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
791 std::vector<std::unique_ptr<C2SettingResult>> failures;
792 c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
793 if (err == C2_OK) {
794 work->worklets.front()->output.configUpdate.push_back(
795 C2Param::Copy(size));
796 } else {
797 ALOGE("Config update size failed");
798 mSignalledError = true;
799 work->result = C2_CORRUPTED;
800 work->workletsProcessed = 1u;
801 return false;
802 }
803 }
804
Neelkamal Semwalc13a76a2021-09-01 17:07:30 +0530805 getVuiParams(buffer);
Manisha Jajoo210d52e2022-05-19 17:11:55 +0530806 getHDRStaticParams(buffer, work);
807 getHDR10PlusInfoData(buffer, work);
808
James Zern0958b8b2023-02-17 21:48:08 -0800809#if LIBYUV_VERSION < 1779
James Zern2c90f852023-02-17 21:42:27 -0800810 if (buffer->bitdepth == 10 &&
811 !(buffer->image_format == libgav1::kImageFormatYuv420 ||
812 buffer->image_format == libgav1::kImageFormatMonochrome400)) {
813 ALOGE("image_format %d not supported for 10bit", buffer->image_format);
814 mSignalledError = true;
815 work->workletsProcessed = 1u;
816 work->result = C2_CORRUPTED;
817 return false;
818 }
James Zern0958b8b2023-02-17 21:48:08 -0800819#endif
James Zern2c90f852023-02-17 21:42:27 -0800820
Vignesh Venkatasubramanianca7d1ab2021-02-04 12:39:06 -0800821 const bool isMonochrome =
822 buffer->image_format == libgav1::kImageFormatMonochrome400;
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700823
824 std::shared_ptr<C2GraphicBlock> block;
825 uint32_t format = HAL_PIXEL_FORMAT_YV12;
Lajos Molnar45109a32022-06-03 09:41:48 -0700826 std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
Harish Mahendrakar10c0c5d2022-05-30 20:35:16 -0700827 if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700828 IntfImpl::Lock lock = mIntf->lock();
Lajos Molnar45109a32022-06-03 09:41:48 -0700829 codedColorAspects = mIntf->getColorAspects_l();
Harish Mahendrakar749a74c2022-01-27 16:47:09 -0800830 bool allowRGBA1010102 = false;
Neelkamal Semwalc13a76a2021-09-01 17:07:30 +0530831 if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
832 codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
833 codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
Harish Mahendrakar749a74c2022-01-27 16:47:09 -0800834 allowRGBA1010102 = true;
835 }
836 format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
James Zern11b60d22023-03-02 19:21:44 -0800837#if !HAVE_LIBYUV_I410_I210_TO_AB30
Harish Mahendrakar749a74c2022-01-27 16:47:09 -0800838 if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
839 (buffer->image_format != libgav1::kImageFormatYuv420)) {
Vignesh Venkatasubramanianca7d1ab2021-02-04 12:39:06 -0800840 ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
Harish Mahendrakar749a74c2022-01-27 16:47:09 -0800841 mSignalledError = true;
842 work->result = C2_OMITTED;
843 work->workletsProcessed = 1u;
844 return false;
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700845 }
James Zern11b60d22023-03-02 19:21:44 -0800846#endif
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700847 }
Harish Mahendrakard4bbb762022-03-29 11:53:23 -0700848
849 if (mHalPixelFormat != format) {
850 C2StreamPixelFormatInfo::output pixelFormat(0u, format);
851 std::vector<std::unique_ptr<C2SettingResult>> failures;
852 c2_status_t err = mIntf->config({&pixelFormat }, C2_MAY_BLOCK, &failures);
853 if (err == C2_OK) {
854 work->worklets.front()->output.configUpdate.push_back(
855 C2Param::Copy(pixelFormat));
856 } else {
857 ALOGE("Config update pixelFormat failed");
858 mSignalledError = true;
859 work->workletsProcessed = 1u;
860 work->result = C2_CORRUPTED;
861 return UNKNOWN_ERROR;
862 }
863 mHalPixelFormat = format;
864 }
865
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700866 C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
867
Vignesh Venkatasubramanianc4d385f2022-02-22 10:49:46 -0800868 // We always create a graphic block that is width aligned to 16 and height
869 // aligned to 2. We set the correct "crop" value of the image in the call to
870 // createGraphicBuffer() by setting the correct image dimensions.
871 c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16),
872 align(mHeight, 2), format, usage,
873 &block);
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700874
875 if (err != C2_OK) {
876 ALOGE("fetchGraphicBlock for Output failed with status %d", err);
877 work->result = err;
878 return false;
879 }
880
881 C2GraphicView wView = block->map().get();
882
883 if (wView.error()) {
884 ALOGE("graphic view map failed %d", wView.error());
885 work->result = C2_CORRUPTED;
886 return false;
887 }
888
889 ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
890 block->height(), mWidth, mHeight, (int)buffer->user_private_data);
891
ming.zhouac19c3d2019-10-11 11:14:07 +0800892 uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
893 uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
894 uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700895
ming.zhouac19c3d2019-10-11 11:14:07 +0800896 C2PlanarLayout layout = wView.layout();
897 size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
Vignesh Venkatasubramanian47b1d222023-01-12 21:45:40 +0000898 size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
899 size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
ming.zhouac19c3d2019-10-11 11:14:07 +0800900
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700901 if (buffer->bitdepth == 10) {
902 const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
903 const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
904 const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
James Zern0958b8b2023-02-17 21:48:08 -0800905 size_t srcYStride = buffer->stride[0] / 2;
906 size_t srcUStride = buffer->stride[1] / 2;
907 size_t srcVStride = buffer->stride[2] / 2;
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700908
909 if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
James Zern11b60d22023-03-02 19:21:44 -0800910 bool processed = false;
911#if HAVE_LIBYUV_I410_I210_TO_AB30
912 if (buffer->image_format == libgav1::kImageFormatYuv444) {
913 libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
914 dstY, dstYStride, &libyuv::kYuvV2020Constants,
915 mWidth, mHeight);
916 processed = true;
917 } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
918 libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
919 dstY, dstYStride, &libyuv::kYuvV2020Constants,
920 mWidth, mHeight);
921 processed = true;
922 }
923#endif // HAVE_LIBYUV_I410_I210_TO_AB30
924 if (!processed) {
925 if (isMonochrome) {
926 const size_t tmpSize = mWidth;
927 const bool needFill = tmpSize > mTmpFrameBufferSize;
928 if (!allocTmpFrameBuffer(tmpSize)) {
929 ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
930 setError(work, C2_NO_MEMORY);
931 return false;
932 }
933 srcU = srcV = mTmpFrameBuffer.get();
934 srcUStride = srcVStride = 0;
935 if (needFill) {
936 std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
937 }
938 }
939 convertYUV420Planar16ToY410OrRGBA1010102(
940 (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
941 srcUStride, srcVStride,
942 dstYStride / sizeof(uint32_t), mWidth, mHeight,
943 std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
944 }
Harish Mahendrakar1b1aef22021-12-30 19:12:51 -0800945 } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
James Zern0958b8b2023-02-17 21:48:08 -0800946 dstYStride /= 2;
947 dstUStride /= 2;
948 dstVStride /= 2;
949#if LIBYUV_VERSION >= 1779
950 if (buffer->image_format == libgav1::kImageFormatYuv444 ||
951 buffer->image_format == libgav1::kImageFormatYuv422) {
952 // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010 and
953 // libyuv::I210ToP010 when they are available.
954 // Note it may be safe to alias dstY in I010ToP010, but the libyuv API doesn't make any
955 // guarantees.
956 const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
957 if (!allocTmpFrameBuffer(tmpSize)) {
958 ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
959 setError(work, C2_NO_MEMORY);
960 return false;
961 }
962 uint16_t *const tmpY = mTmpFrameBuffer.get();
963 uint16_t *const tmpU = tmpY + dstYStride * mHeight;
964 uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
965 if (buffer->image_format == libgav1::kImageFormatYuv444) {
966 libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
967 tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
968 mWidth, mHeight);
969 } else {
970 libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
971 tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
972 mWidth, mHeight);
973 }
974 libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
975 (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride,
976 mWidth, mHeight);
977 } else {
978 convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
979 srcYStride, srcUStride, srcVStride, dstYStride,
980 dstUStride, mWidth, mHeight, isMonochrome);
981 }
982#else // LIBYUV_VERSION < 1779
James Zern2c90f852023-02-17 21:42:27 -0800983 convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
James Zern0958b8b2023-02-17 21:48:08 -0800984 srcYStride, srcUStride, srcVStride, dstYStride,
985 dstUStride, mWidth, mHeight, isMonochrome);
986#endif // LIBYUV_VERSION >= 1779
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -0700987 } else {
James Zern0958b8b2023-02-17 21:48:08 -0800988#if LIBYUV_VERSION >= 1779
989 if (buffer->image_format == libgav1::kImageFormatYuv444) {
990 // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420 when
991 // it's available.
992 const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
993 if (!allocTmpFrameBuffer(tmpSize)) {
994 ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
995 setError(work, C2_NO_MEMORY);
996 return false;
997 }
998 uint16_t *const tmpY = mTmpFrameBuffer.get();
999 uint16_t *const tmpU = tmpY + dstYStride * mHeight;
1000 uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
1001 libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
1002 tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
1003 mWidth, mHeight);
1004 libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
1005 dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
1006 mWidth, mHeight);
1007 } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
1008 libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
1009 dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
1010 mWidth, mHeight);
1011 } else {
1012 convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
1013 srcUStride, srcVStride, dstYStride, dstUStride,
1014 mWidth, mHeight, isMonochrome);
1015 }
1016#else // LIBYUV_VERSION < 1779
1017 convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
1018 srcUStride, srcVStride, dstYStride, dstUStride,
1019 mWidth, mHeight, isMonochrome);
1020#endif // LIBYUV_VERSION >= 1779
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -07001021 }
1022 } else {
1023 const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
1024 const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
1025 const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
James Zern0958b8b2023-02-17 21:48:08 -08001026 size_t srcYStride = buffer->stride[0];
1027 size_t srcUStride = buffer->stride[1];
1028 size_t srcVStride = buffer->stride[2];
Vignesh Venkatasubramanian406ed312022-04-21 10:32:55 -07001029
1030 if (buffer->image_format == libgav1::kImageFormatYuv444) {
1031 libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
1032 dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
1033 mWidth, mHeight);
1034 } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
1035 libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
1036 dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
1037 mWidth, mHeight);
1038 } else {
1039 convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
1040 srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
1041 isMonochrome);
1042 }
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -07001043 }
1044 finishWork(buffer->user_private_data, work, std::move(block));
1045 block = nullptr;
1046 return true;
1047}
1048
1049c2_status_t C2SoftGav1Dec::drainInternal(
1050 uint32_t drainMode, const std::shared_ptr<C2BlockPool> &pool,
1051 const std::unique_ptr<C2Work> &work) {
1052 if (drainMode == NO_DRAIN) {
1053 ALOGW("drain with NO_DRAIN: no-op");
1054 return C2_OK;
1055 }
1056 if (drainMode == DRAIN_CHAIN) {
1057 ALOGW("DRAIN_CHAIN not supported");
1058 return C2_OMITTED;
1059 }
1060
James Zernb7aee6e2020-06-26 13:49:53 -07001061 const Libgav1StatusCode status = mCodecCtx->SignalEOS();
Vignesh Venkatasubramanian0f3e7422019-06-17 16:21:36 -07001062 if (status != kLibgav1StatusOk) {
1063 ALOGE("Failed to flush av1 decoder. status: %d.", status);
1064 return C2_CORRUPTED;
1065 }
1066
1067 while (outputBuffer(pool, work)) {
1068 }
1069
1070 if (drainMode == DRAIN_COMPONENT_WITH_EOS && work &&
1071 work->workletsProcessed == 0u) {
1072 fillEmptyWork(work);
1073 }
1074
1075 return C2_OK;
1076}
1077
// Public drain entry point. An explicit drain request carries no pending work
// item, so delegate to drainInternal() with a null |work|.
c2_status_t C2SoftGav1Dec::drain(uint32_t drainMode,
                                 const std::shared_ptr<C2BlockPool> &pool) {
  return drainInternal(drainMode, pool, nullptr);
}
1082
Vignesh Venkatasubramanianb6d383d2019-06-10 15:11:58 -07001083class C2SoftGav1Factory : public C2ComponentFactory {
1084 public:
1085 C2SoftGav1Factory()
1086 : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
1087 GetCodec2PlatformComponentStore()->getParamReflector())) {}
1088
1089 virtual c2_status_t createComponent(
1090 c2_node_id_t id, std::shared_ptr<C2Component> *const component,
1091 std::function<void(C2Component *)> deleter) override {
1092 *component = std::shared_ptr<C2Component>(
1093 new C2SoftGav1Dec(COMPONENT_NAME, id,
1094 std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
1095 deleter);
1096 return C2_OK;
1097 }
1098
1099 virtual c2_status_t createInterface(
1100 c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *const interface,
1101 std::function<void(C2ComponentInterface *)> deleter) override {
1102 *interface = std::shared_ptr<C2ComponentInterface>(
1103 new SimpleInterface<C2SoftGav1Dec::IntfImpl>(
1104 COMPONENT_NAME, id,
1105 std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
1106 deleter);
1107 return C2_OK;
1108 }
1109
1110 virtual ~C2SoftGav1Factory() override = default;
1111
1112 private:
1113 std::shared_ptr<C2ReflectorHelper> mHelper;
1114};
1115
1116} // namespace android
1117
// C entry point looked up by the Codec2 component loader; returns a
// heap-allocated factory that the loader later hands to
// DestroyCodec2Factory().
__attribute__((cfi_canonical_jump_table))
extern "C" ::C2ComponentFactory *CreateCodec2Factory() {
  ALOGV("in %s", __func__);
  return new ::android::C2SoftGav1Factory();
}
1123
// C entry point used by the Codec2 component loader to release a factory
// previously obtained from CreateCodec2Factory().
__attribute__((cfi_canonical_jump_table))
extern "C" void DestroyCodec2Factory(::C2ComponentFactory *factory) {
  ALOGV("in %s", __func__);
  delete factory;
}