/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftGav1Dec"
#include "C2SoftGav1Dec.h"

#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <log/log.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>

namespace android {
namespace {

constexpr uint8_t NEUTRAL_UV_VALUE = 128;

}  // namespace

// codecname set and passed in as a compile flag from Android.bp
constexpr char COMPONENT_NAME[] = CODECNAME;

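// Codec 2.0 parameter interface for the software AV1 (libgav1) decoder. It
// declares the component attributes, supported picture sizes, AV1 profiles
// and levels, HDR10+ metadata streams, color aspects, and the output pixel
// format.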
class C2SoftGav1Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
  explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
      : SimpleInterface<void>::BaseParams(
            helper, COMPONENT_NAME, C2Component::KIND_DECODER,
            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
    noPrivateBuffers();  // TODO: account for our buffers here.
    noInputReferences();
    noOutputReferences();
    noInputLatency();
    noTimeStretch();

    addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                     .withConstValue(new C2ComponentAttributesSetting(
                         C2Component::ATTRIB_IS_TEMPORAL))
                     .build());

    addParameter(
        DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
            .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 4096, 2),
                C2F(mSize, height).inRange(2, 4096, 2),
            })
            .withSetter(SizeSetter)
            .build());

    addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                     .withDefault(new C2StreamProfileLevelInfo::input(
                         0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
                     .withFields({C2F(mProfileLevel, profile)
                                      .oneOf({C2Config::PROFILE_AV1_0,
                                              C2Config::PROFILE_AV1_1}),
                                  C2F(mProfileLevel, level)
                                      .oneOf({
                                          C2Config::LEVEL_AV1_2, C2Config::LEVEL_AV1_2_1,
                                          C2Config::LEVEL_AV1_2_2, C2Config::LEVEL_AV1_2_3,
                                          C2Config::LEVEL_AV1_3, C2Config::LEVEL_AV1_3_1,
                                          C2Config::LEVEL_AV1_3_2, C2Config::LEVEL_AV1_3_3,
                                          C2Config::LEVEL_AV1_4, C2Config::LEVEL_AV1_4_1,
                                          C2Config::LEVEL_AV1_4_2, C2Config::LEVEL_AV1_4_3,
                                          C2Config::LEVEL_AV1_5, C2Config::LEVEL_AV1_5_1,
                                          C2Config::LEVEL_AV1_5_2, C2Config::LEVEL_AV1_5_3,
                                      })})
                     .withSetter(ProfileLevelSetter, mSize)
                     .build());

    mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
    addParameter(
        DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
            .withDefault(mHdr10PlusInfoInput)
            .withFields({
                C2F(mHdr10PlusInfoInput, m.value).any(),
            })
            .withSetter(Hdr10PlusInfoInputSetter)
            .build());

    mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
    addParameter(
        DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
            .withDefault(mHdr10PlusInfoOutput)
            .withFields({
                C2F(mHdr10PlusInfoOutput, m.value).any(),
            })
            .withSetter(Hdr10PlusInfoOutputSetter)
            .build());

    addParameter(
        DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
            .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 2048, 2),
                C2F(mSize, height).inRange(2, 2048, 2),
            })
            .withSetter(MaxPictureSizeSetter, mSize)
            .build());

    addParameter(DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
                     .withDefault(new C2StreamMaxBufferSizeInfo::input(
                         0u, 320 * 240 * 3 / 4))
                     .withFields({
                         C2F(mMaxInputSize, value).any(),
                     })
                     .calculatedAs(MaxInputSizeSetter, mMaxSize)
                     .build());

    C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
    std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
        C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */,
                                               C2Color::YUV_420);
    memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));

    defaultColorInfo = C2StreamColorInfo::output::AllocShared(
        {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */,
        C2Color::YUV_420);
    helper->addStructDescriptors<C2ChromaOffsetStruct>();

    addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
                     .withConstValue(defaultColorInfo)
                     .build());

    addParameter(
        DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsTuning::output(
                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields(
                {C2F(mDefaultColorAspects, range)
                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                 C2F(mDefaultColorAspects, primaries)
                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                              C2Color::PRIMARIES_OTHER),
                 C2F(mDefaultColorAspects, transfer)
                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
                              C2Color::TRANSFER_OTHER),
                 C2F(mDefaultColorAspects, matrix)
                     .inRange(C2Color::MATRIX_UNSPECIFIED,
                              C2Color::MATRIX_OTHER)})
            .withSetter(DefaultColorAspectsSetter)
            .build());

    addParameter(
        DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::input(
                0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mCodedColorAspects, range).inRange(
                    C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mCodedColorAspects, primaries).inRange(
                    C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mCodedColorAspects, transfer).inRange(
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mCodedColorAspects, matrix).inRange(
                    C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(CodedColorAspectsSetter)
            .build());

    addParameter(
        DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::output(
                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mColorAspects, range).inRange(
                    C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mColorAspects, primaries).inRange(
                    C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mColorAspects, transfer).inRange(
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mColorAspects, matrix).inRange(
                    C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
            .build());

    // TODO: support more formats?
    addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
                     .withConstValue(new C2StreamPixelFormatInfo::output(
                         0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
                     .build());
  }

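  // Static setters referenced by the parameter definitions above. The
  // framework invokes them when the corresponding parameters are configured;
  // they clamp or reconcile the requested values against what is supported.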
  static C2R SizeSetter(bool mayBlock,
                        const C2P<C2StreamPictureSizeInfo::output> &oldMe,
                        C2P<C2StreamPictureSizeInfo::output> &me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
      me.set().width = oldMe.v.width;
    }
    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
      me.set().height = oldMe.v.height;
    }
    return res;
  }

  static C2R MaxPictureSizeSetter(
      bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
      const C2P<C2StreamPictureSizeInfo::output> &size) {
    (void)mayBlock;
    // TODO: get max width/height from the size's field helpers vs.
    // hardcoding
    me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
    me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
    return C2R::Ok();
  }

  static C2R MaxInputSizeSetter(
      bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
      const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
    (void)mayBlock;
    // assume compression ratio of 2
    me.set().value =
        (((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072);
    return C2R::Ok();
  }

  static C2R DefaultColorAspectsSetter(
      bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
      me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
      me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
      me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
      me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
  }

  static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
      me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
      me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
      me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
      me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
  }

  static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
                                const C2P<C2StreamColorAspectsTuning::output> &def,
                                const C2P<C2StreamColorAspectsInfo::input> &coded) {
    (void)mayBlock;
    // take default values for all unspecified fields, and coded values for specified ones
    me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
    me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
        ? def.v.primaries : coded.v.primaries;
    me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
        ? def.v.transfer : coded.v.transfer;
    me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
    return C2R::Ok();
  }

  static C2R ProfileLevelSetter(
      bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
      const C2P<C2StreamPictureSizeInfo::output> &size) {
    (void)mayBlock;
    (void)size;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  std::shared_ptr<C2StreamColorAspectsTuning::output>
  getDefaultColorAspects_l() {
    return mDefaultColorAspects;
  }

  std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
    return mColorAspects;
  }

  static C2R Hdr10PlusInfoInputSetter(bool mayBlock,
                                      C2P<C2StreamHdr10PlusInfo::input> &me) {
    (void)mayBlock;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  static C2R Hdr10PlusInfoOutputSetter(bool mayBlock,
                                       C2P<C2StreamHdr10PlusInfo::output> &me) {
    (void)mayBlock;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

 private:
  std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
  std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
  std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
  std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
  std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
  std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
  std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
  std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
  std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
  std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
  std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
};

C2SoftGav1Dec::C2SoftGav1Dec(const char *name, c2_node_id_t id,
                             const std::shared_ptr<IntfImpl> &intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mCodecCtx(nullptr) {
  mIsFormatR10G10B10A2Supported = IsFormatR10G10B10A2SupportedForLegacyRendering();
  gettimeofday(&mTimeStart, nullptr);
  gettimeofday(&mTimeEnd, nullptr);
}

C2SoftGav1Dec::~C2SoftGav1Dec() { onRelease(); }

c2_status_t C2SoftGav1Dec::onInit() {
  return initDecoder() ? C2_OK : C2_CORRUPTED;
}

c2_status_t C2SoftGav1Dec::onStop() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  return C2_OK;
}

void C2SoftGav1Dec::onReset() {
  (void)onStop();
  c2_status_t err = onFlush_sm();
  if (err != C2_OK) {
    ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
    destroyDecoder();
    if (!initDecoder()) {
      ALOGE("Hard reset failed.");
    }
  }
}

void C2SoftGav1Dec::onRelease() { destroyDecoder(); }

c2_status_t C2SoftGav1Dec::onFlush_sm() {
  Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

  // Dequeue frame (if any) that was enqueued previously.
  const libgav1::DecoderBuffer *buffer;
  status = mCodecCtx->DequeueFrame(&buffer);
  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("Failed to dequeue frame after flushing the av1 decoder. status: %d",
          status);
    return C2_CORRUPTED;
  }

  mSignalledError = false;
  mSignalledOutputEos = false;

  return C2_OK;
}

static int GetCPUCoreCount() {
  int cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
  cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
#else
  // _SC_NPROC_ONLN must be defined...
  cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
  CHECK(cpuCoreCount >= 1);
  ALOGV("Number of CPU cores: %d", cpuCoreCount);
  return cpuCoreCount;
}

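// Creates a fresh libgav1 decoder instance configured to use all online CPU
// cores. Returns false if the decoder could not be allocated or initialized.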
bool C2SoftGav1Dec::initDecoder() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  mCodecCtx.reset(new libgav1::Decoder());

  if (mCodecCtx == nullptr) {
    ALOGE("mCodecCtx is null");
    return false;
  }

  libgav1::DecoderSettings settings = {};
  settings.threads = GetCPUCoreCount();

  ALOGV("Using libgav1 AV1 software decoder.");
  Libgav1StatusCode status = mCodecCtx->Init(&settings);
  if (status != kLibgav1StatusOk) {
    ALOGE("av1 decoder failed to initialize. status: %d.", status);
    return false;
  }

  return true;
}

void C2SoftGav1Dec::destroyDecoder() { mCodecCtx = nullptr; }

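// Completes |work| without attaching an output buffer, propagating only the
// end-of-stream flag (if present) and the input ordinal.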
void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
  uint32_t flags = 0;
  if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
    flags |= C2FrameData::FLAG_END_OF_STREAM;
    ALOGV("signalling eos");
  }
  work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
  work->worklets.front()->output.buffers.clear();
  work->worklets.front()->output.ordinal = work->input.ordinal;
  work->workletsProcessed = 1u;
}

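// Wraps the decoded graphic block in a C2Buffer, attaches the current output
// color aspects, and completes the work item whose frame index matches
// |index| -- either the in-flight |work| or an earlier pending one.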
void C2SoftGav1Dec::finishWork(uint64_t index,
                               const std::unique_ptr<C2Work> &work,
                               const std::shared_ptr<C2GraphicBlock> &block) {
  std::shared_ptr<C2Buffer> buffer =
      createGraphicBuffer(block, C2Rect(mWidth, mHeight));
  {
    IntfImpl::Lock lock = mIntf->lock();
    buffer->setInfo(mIntf->getColorAspects_l());
  }
  auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
    uint32_t flags = 0;
    if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
        (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
      flags |= C2FrameData::FLAG_END_OF_STREAM;
      ALOGV("signalling eos");
    }
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.buffers.push_back(buffer);
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
  };
  if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
    fillWork(work);
  } else {
    finish(index, fillWork);
  }
}

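// Main work routine: maps the input buffer, enqueues the compressed frame
// into libgav1 (tagging it with the work's frame index so the decoded output
// can be matched back), then attempts to dequeue and output a picture.
// Codec-config buffers are consumed without producing output, and an EOS
// input triggers a drain.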
void C2SoftGav1Dec::process(const std::unique_ptr<C2Work> &work,
                            const std::shared_ptr<C2BlockPool> &pool) {
  work->result = C2_OK;
  work->workletsProcessed = 0u;
  work->worklets.front()->output.configUpdate.clear();
  work->worklets.front()->output.flags = work->input.flags;
  if (mSignalledError || mSignalledOutputEos) {
    work->result = C2_BAD_VALUE;
    return;
  }

  size_t inOffset = 0u;
  size_t inSize = 0u;
  C2ReadView rView = mDummyReadView;
  if (!work->input.buffers.empty()) {
    rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
    inSize = rView.capacity();
    if (inSize && rView.error()) {
      ALOGE("read view map failed %d", rView.error());
      work->result = C2_CORRUPTED;
      return;
    }
  }

  bool codecConfig =
      ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
  bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);

  ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", inSize,
        (int)work->input.ordinal.timestamp.peeku(),
        (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);

  if (codecConfig) {
    fillEmptyWork(work);
    return;
  }

  int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
  if (inSize) {
    uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);
    int32_t decodeTime = 0;
    int32_t delay = 0;

    GETTIME(&mTimeStart, nullptr);
    TIME_DIFF(mTimeEnd, mTimeStart, delay);

    const Libgav1StatusCode status =
        mCodecCtx->EnqueueFrame(bitstream, inSize, frameIndex,
                                /*buffer_private_data=*/nullptr);

    GETTIME(&mTimeEnd, nullptr);
    TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
    ALOGV("decodeTime=%4d delay=%4d\n", decodeTime, delay);

    if (status != kLibgav1StatusOk) {
      ALOGE("av1 decoder failed to decode frame. status: %d.", status);
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      mSignalledError = true;
      return;
    }

  }

  (void)outputBuffer(pool, work);

  if (eos) {
    drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
    mSignalledOutputEos = true;
  } else if (!inSize) {
    fillEmptyWork(work);
  }
}

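// Copies an 8-bit libgav1 output frame into the destination YV12 planes. For
// monochrome input, the chroma planes are filled with the neutral value 128.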
static void copyOutputBufferToYV12Frame(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
                                        const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
                                        size_t srcYStride, size_t srcUStride, size_t srcVStride,
                                        size_t dstYStride, size_t dstUVStride,
                                        uint32_t width, uint32_t height,
                                        bool isMonochrome) {

  for (size_t i = 0; i < height; ++i) {
    memcpy(dstY, srcY, width);
    srcY += srcYStride;
    dstY += dstYStride;
  }

  if (isMonochrome) {
    // Fill with neutral U/V values.
    for (size_t i = 0; i < height / 2; ++i) {
      memset(dstV, NEUTRAL_UV_VALUE, width / 2);
      memset(dstU, NEUTRAL_UV_VALUE, width / 2);
      dstV += dstUVStride;
      dstU += dstUVStride;
    }
    return;
  }

  for (size_t i = 0; i < height / 2; ++i) {
    memcpy(dstV, srcV, width / 2);
    srcV += srcVStride;
    dstV += dstUVStride;
  }

  for (size_t i = 0; i < height / 2; ++i) {
    memcpy(dstU, srcU, width / 2);
    srcU += srcUStride;
    dstU += dstUVStride;
  }
}

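// Packs a 10-bit YUV 4:2:0 frame into 32-bit words for an RGBA_1010102
// destination: U in bits 0-9, Y in bits 10-19, V in bits 20-29, and a 2-bit
// alpha of 3 in the top bits, processing two rows per iteration.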
static void convertYUV420Planar16ToY410(uint32_t *dst, const uint16_t *srcY,
                                        const uint16_t *srcU,
                                        const uint16_t *srcV, size_t srcYStride,
                                        size_t srcUStride, size_t srcVStride,
                                        size_t dstStride, size_t width,
                                        size_t height) {
  // Converting two lines at a time, slightly faster
  for (size_t y = 0; y < height; y += 2) {
    uint32_t *dstTop = (uint32_t *)dst;
    uint32_t *dstBot = (uint32_t *)(dst + dstStride);
    uint16_t *ySrcTop = (uint16_t *)srcY;
    uint16_t *ySrcBot = (uint16_t *)(srcY + srcYStride);
    uint16_t *uSrc = (uint16_t *)srcU;
    uint16_t *vSrc = (uint16_t *)srcV;

    uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
    size_t x = 0;
    for (; x < width - 3; x += 4) {
      u01 = *((uint32_t *)uSrc);
      uSrc += 2;
      v01 = *((uint32_t *)vSrc);
      vSrc += 2;

      y01 = *((uint32_t *)ySrcTop);
      ySrcTop += 2;
      y23 = *((uint32_t *)ySrcTop);
      ySrcTop += 2;
      y45 = *((uint32_t *)ySrcBot);
      ySrcBot += 2;
      y67 = *((uint32_t *)ySrcBot);
      ySrcBot += 2;

      uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
      uv1 = (u01 >> 16) | ((v01 >> 16) << 20);

      *dstTop++ = 3 << 30 | ((y01 & 0x3FF) << 10) | uv0;
      *dstTop++ = 3 << 30 | ((y01 >> 16) << 10) | uv0;
      *dstTop++ = 3 << 30 | ((y23 & 0x3FF) << 10) | uv1;
      *dstTop++ = 3 << 30 | ((y23 >> 16) << 10) | uv1;

      *dstBot++ = 3 << 30 | ((y45 & 0x3FF) << 10) | uv0;
      *dstBot++ = 3 << 30 | ((y45 >> 16) << 10) | uv0;
      *dstBot++ = 3 << 30 | ((y67 & 0x3FF) << 10) | uv1;
      *dstBot++ = 3 << 30 | ((y67 >> 16) << 10) | uv1;
    }

    // There should be at most 2 more pixels to process. Note that we don't
    // need to consider odd case as the buffer is always aligned to even.
    if (x < width) {
      u01 = *uSrc;
      v01 = *vSrc;
      y01 = *((uint32_t *)ySrcTop);
      y45 = *((uint32_t *)ySrcBot);
      uv0 = (u01 & 0x3FF) | ((v01 & 0x3FF) << 20);
      *dstTop++ = ((y01 & 0x3FF) << 10) | uv0;
      *dstTop++ = ((y01 >> 16) << 10) | uv0;
      *dstBot++ = ((y45 & 0x3FF) << 10) | uv0;
      *dstBot++ = ((y45 >> 16) << 10) | uv0;
    }

    srcY += srcYStride * 2;
    srcU += srcUStride;
    srcV += srcVStride;
    dst += dstStride * 2;
  }
}

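// Downconverts a 10-bit YUV 4:2:0 frame to 8-bit planar YUV by dropping the
// two least significant bits of each sample; monochrome input gets neutral
// chroma planes instead.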
static void convertYUV420Planar16ToYUV420Planar(
    uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
    const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
    size_t srcYStride, size_t srcUStride, size_t srcVStride,
    size_t dstYStride, size_t dstUVStride,
    size_t width, size_t height, bool isMonochrome) {

  for (size_t y = 0; y < height; ++y) {
    for (size_t x = 0; x < width; ++x) {
      dstY[x] = (uint8_t)(srcY[x] >> 2);
    }

    srcY += srcYStride;
    dstY += dstYStride;
  }

  if (isMonochrome) {
    // Fill with neutral U/V values.
    for (size_t y = 0; y < (height + 1) / 2; ++y) {
      memset(dstV, NEUTRAL_UV_VALUE, (width + 1) / 2);
      memset(dstU, NEUTRAL_UV_VALUE, (width + 1) / 2);
      dstV += dstUVStride;
      dstU += dstUVStride;
    }
    return;
  }

  for (size_t y = 0; y < (height + 1) / 2; ++y) {
    for (size_t x = 0; x < (width + 1) / 2; ++x) {
      dstU[x] = (uint8_t)(srcU[x] >> 2);
      dstV[x] = (uint8_t)(srcV[x] >> 2);
    }

    srcU += srcUStride;
    srcV += srcVStride;
    dstU += dstUVStride;
    dstV += dstUVStride;
  }
}

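// Reads the color description (primaries, transfer, matrix, range) carried in
// the decoded frame; if it differs from the last seen values, maps it to
// Codec 2.0 color aspects and pushes the update to the interface.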
void C2SoftGav1Dec::getVuiParams(const libgav1::DecoderBuffer *buffer) {
  VuiColorAspects vuiColorAspects;
  vuiColorAspects.primaries = buffer->color_primary;
  vuiColorAspects.transfer = buffer->transfer_characteristics;
  vuiColorAspects.coeffs = buffer->matrix_coefficients;
  vuiColorAspects.fullRange = buffer->color_range;

  // convert vui aspects to C2 values if changed
  if (!(vuiColorAspects == mBitstreamColorAspects)) {
    mBitstreamColorAspects = vuiColorAspects;
    ColorAspects sfAspects;
    C2StreamColorAspectsInfo::input codedAspects = { 0u };
    ColorUtils::convertIsoColorAspectsToCodecAspects(
        vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
        vuiColorAspects.fullRange, sfAspects);
    if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
      codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
      codedAspects.range = C2Color::RANGE_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
      codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
      codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
    }
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
  }
}

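// Dequeues one decoded frame from libgav1 (if available), reconciles the
// picture size, picks an output pixel format (YV12, or RGBA_1010102 for
// supported 10-bit BT.2020/ST2084 content), converts the planes into a newly
// fetched graphic block, and finishes the matching work item. Returns true
// only if a frame was output.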
bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
                                 const std::unique_ptr<C2Work> &work) {
  if (!(work && pool)) return false;

  const libgav1::DecoderBuffer *buffer;
  const Libgav1StatusCode status = mCodecCtx->DequeueFrame(&buffer);

  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("av1 decoder DequeueFrame failed. status: %d.", status);
    return false;
  }

  // |buffer| can be NULL if status was equal to kLibgav1StatusOk or
  // kLibgav1StatusNothingToDequeue. This is not an error and could mean one
  // of two things:
  //  - The EnqueueFrame() call was a flush (called with nullptr).
  //  - The enqueued frame did not have any displayable frames.
  if (!buffer) {
    return false;
  }

  const int width = buffer->displayed_width[0];
  const int height = buffer->displayed_height[0];
  if (width != mWidth || height != mHeight) {
    mWidth = width;
    mHeight = height;

    C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
    if (err == C2_OK) {
      work->worklets.front()->output.configUpdate.push_back(
          C2Param::Copy(size));
    } else {
      ALOGE("Config update size failed");
      mSignalledError = true;
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      return false;
    }
  }

  getVuiParams(buffer);
  if (!(buffer->image_format == libgav1::kImageFormatYuv420 ||
        buffer->image_format == libgav1::kImageFormatMonochrome400)) {
    ALOGE("image_format %d not supported", buffer->image_format);
    mSignalledError = true;
    work->workletsProcessed = 1u;
    work->result = C2_CORRUPTED;
    return false;
  }
  const bool isMonochrome =
      buffer->image_format == libgav1::kImageFormatMonochrome400;

  std::shared_ptr<C2GraphicBlock> block;
  uint32_t format = HAL_PIXEL_FORMAT_YV12;
  if (buffer->bitdepth == 10) {
    IntfImpl::Lock lock = mIntf->lock();
    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects =
        mIntf->getColorAspects_l();

    if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
        codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
        codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
      if (buffer->image_format != libgav1::kImageFormatYuv420) {
        ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
        mSignalledError = true;
        work->result = C2_OMITTED;
        work->workletsProcessed = 1u;
        return false;
      }
      // TODO (b/201787956) For devices that do not support HAL_PIXEL_FORMAT_RGBA_1010102,
      // HAL_PIXEL_FORMAT_YV12 is used as a temporary work around.
      if (!mIsFormatR10G10B10A2Supported) {
        ALOGE("HAL_PIXEL_FORMAT_RGBA_1010102 isn't supported");
        format = HAL_PIXEL_FORMAT_YV12;
      } else {
        format = HAL_PIXEL_FORMAT_RGBA_1010102;
      }
    }
  }
  C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};

  c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format,
                                            usage, &block);

  if (err != C2_OK) {
    ALOGE("fetchGraphicBlock for Output failed with status %d", err);
    work->result = err;
    return false;
  }

  C2GraphicView wView = block->map().get();

  if (wView.error()) {
    ALOGE("graphic view map failed %d", wView.error());
    work->result = C2_CORRUPTED;
    return false;
  }

  ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
        block->height(), mWidth, mHeight, (int)buffer->user_private_data);

  uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
  uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
  uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);
  size_t srcYStride = buffer->stride[0];
  size_t srcUStride = buffer->stride[1];
  size_t srcVStride = buffer->stride[2];

  C2PlanarLayout layout = wView.layout();
  size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
  size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;

  if (buffer->bitdepth == 10) {
    const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
    const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
    const uint16_t *srcV = (const uint16_t *)buffer->plane[2];

    if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
      convertYUV420Planar16ToY410(
          (uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
          srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
    } else {
      convertYUV420Planar16ToYUV420Planar(
          dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
          srcVStride / 2, dstYStride, dstUVStride, mWidth, mHeight,
          isMonochrome);
    }
  } else {
    const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
    const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
    const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
    copyOutputBufferToYV12Frame(
        dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
        dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
  }
  finishWork(buffer->user_private_data, work, std::move(block));
  block = nullptr;
  return true;
}

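// Signals end-of-stream to libgav1 and outputs every remaining decoded frame.
// Only full-component drains are supported; DRAIN_CHAIN is rejected.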
c2_status_t C2SoftGav1Dec::drainInternal(
    uint32_t drainMode, const std::shared_ptr<C2BlockPool> &pool,
    const std::unique_ptr<C2Work> &work) {
  if (drainMode == NO_DRAIN) {
    ALOGW("drain with NO_DRAIN: no-op");
    return C2_OK;
  }
  if (drainMode == DRAIN_CHAIN) {
    ALOGW("DRAIN_CHAIN not supported");
    return C2_OMITTED;
  }

  const Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

  while (outputBuffer(pool, work)) {
  }

  if (drainMode == DRAIN_COMPONENT_WITH_EOS && work &&
      work->workletsProcessed == 0u) {
    fillEmptyWork(work);
  }

  return C2_OK;
}

c2_status_t C2SoftGav1Dec::drain(uint32_t drainMode,
                                 const std::shared_ptr<C2BlockPool> &pool) {
  return drainInternal(drainMode, pool, nullptr);
}

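// Component factory registered with the Codec 2.0 platform store; it creates
// C2SoftGav1Dec components and their standalone interfaces.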
class C2SoftGav1Factory : public C2ComponentFactory {
 public:
  C2SoftGav1Factory()
      : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
            GetCodec2PlatformComponentStore()->getParamReflector())) {}

  virtual c2_status_t createComponent(
      c2_node_id_t id, std::shared_ptr<C2Component> *const component,
      std::function<void(C2Component *)> deleter) override {
    *component = std::shared_ptr<C2Component>(
        new C2SoftGav1Dec(COMPONENT_NAME, id,
                          std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
        deleter);
    return C2_OK;
  }

  virtual c2_status_t createInterface(
      c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *const interface,
      std::function<void(C2ComponentInterface *)> deleter) override {
    *interface = std::shared_ptr<C2ComponentInterface>(
        new SimpleInterface<C2SoftGav1Dec::IntfImpl>(
            COMPONENT_NAME, id,
            std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
        deleter);
    return C2_OK;
  }

  virtual ~C2SoftGav1Factory() override = default;

 private:
  std::shared_ptr<C2ReflectorHelper> mHelper;
};

}  // namespace android

__attribute__((cfi_canonical_jump_table))
extern "C" ::C2ComponentFactory *CreateCodec2Factory() {
  ALOGV("in %s", __func__);
  return new ::android::C2SoftGav1Factory();
}

__attribute__((cfi_canonical_jump_table))
extern "C" void DestroyCodec2Factory(::C2ComponentFactory *factory) {
  ALOGV("in %s", __func__);
  delete factory;
}