/*
 * Copyright 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftVpxEnc"
#include <log/log.h>
#include <utils/misc.h>

#include <media/hardware/VideoAPI.h>

#include <Codec2BufferUtils.h>
#include <C2Debug.h>
#include "C2SoftVpxEnc.h"

#ifndef INT32_MAX
#define INT32_MAX 2147483647
#endif

/* Quantization param values defined by the spec */
#define VPX_QP_MIN 0
#define VPX_QP_MAX 63
#define VPX_QP_DEFAULT_MIN VPX_QP_MIN
#define VPX_QP_DEFAULT_MAX VPX_QP_MAX

namespace android {

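// IntfImpl declares the Codec2 parameters this encoder exposes (picture size,
// bitrate and bitrate mode, frame rate, temporal layering, sync-frame interval,
// profile/level, color aspects and per-picture QP bounds), together with their
// defaults, supported ranges and setters.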
C2SoftVpxEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
    : SimpleInterface<void>::BaseParams(
            helper,
            COMPONENT_NAME,
            C2Component::KIND_ENCODER,
            C2Component::DOMAIN_VIDEO,
            MEDIA_MIMETYPE_VIDEO) {
    noPrivateBuffers(); // TODO: account for our buffers here
    noInputReferences();
    noOutputReferences();
    noInputLatency();
    noTimeStretch();
    setDerivedInstance(this);

    addParameter(
            DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
            .withConstValue(new C2ComponentAttributesSetting(
                    C2Component::ATTRIB_IS_TEMPORAL))
            .build());

    addParameter(
            DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
            .withConstValue(new C2StreamUsageTuning::input(
                    0u, (uint64_t)C2MemoryUsage::CPU_READ))
            .build());

    addParameter(
            DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
            .withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
            .withFields({
                C2F(mSize, width).inRange(2, 2048, 2),
                C2F(mSize, height).inRange(2, 2048, 2),
            })
            .withSetter(SizeSetter)
            .build());

    addParameter(
            DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
            .withDefault(new C2StreamBitrateModeTuning::output(
                    0u, C2Config::BITRATE_VARIABLE))
            .withFields({
                C2F(mBitrateMode, value).oneOf({
                    C2Config::BITRATE_CONST, C2Config::BITRATE_VARIABLE })
            })
            .withSetter(
                Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
            .withDefault(new C2StreamFrameRateInfo::output(0u, 1.))
            // TODO: More restriction?
            .withFields({C2F(mFrameRate, value).greaterThan(0.)})
            .withSetter(
                Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mLayering, C2_PARAMKEY_TEMPORAL_LAYERING)
            .withDefault(C2StreamTemporalLayeringTuning::output::AllocShared(0u, 0, 0, 0))
            .withFields({
                C2F(mLayering, m.layerCount).inRange(0, 4),
                C2F(mLayering, m.bLayerCount).inRange(0, 0),
                C2F(mLayering, m.bitrateRatios).inRange(0., 1.)
            })
            .withSetter(LayeringSetter)
            .build());

    addParameter(
            DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
            .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
            .withFields({C2F(mSyncFramePeriod, value).any()})
            .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
            .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
            .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
            .withSetter(BitrateSetter)
            .build());

    addParameter(
            DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
            .withConstValue(new C2StreamIntraRefreshTuning::output(
                    0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
            .build());
#ifdef VP9
    addParameter(
            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
            .withDefault(new C2StreamProfileLevelInfo::output(
                    0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
            .withFields({
                C2F(mProfileLevel, profile).equalTo(
                    PROFILE_VP9_0
                ),
                C2F(mProfileLevel, level).oneOf({
                    C2Config::LEVEL_VP9_1,
                    C2Config::LEVEL_VP9_1_1,
                    C2Config::LEVEL_VP9_2,
                    C2Config::LEVEL_VP9_2_1,
                    C2Config::LEVEL_VP9_3,
                    C2Config::LEVEL_VP9_3_1,
                    C2Config::LEVEL_VP9_4,
                    C2Config::LEVEL_VP9_4_1,
                }),
            })
            .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
            .build());
#else
    addParameter(
            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
            .withDefault(new C2StreamProfileLevelInfo::output(
                    0u, PROFILE_VP8_0, LEVEL_UNUSED))
            .withFields({
                C2F(mProfileLevel, profile).equalTo(
                    PROFILE_VP8_0
                ),
                C2F(mProfileLevel, level).equalTo(
                    LEVEL_UNUSED),
            })
            .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
            .build());
#endif
    addParameter(
            DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
            .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
            .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
            .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::input(
                    0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mColorAspects, range).inRange(
                        C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mColorAspects, primaries).inRange(
                        C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mColorAspects, transfer).inRange(
                        C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mColorAspects, matrix).inRange(
                        C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(ColorAspectsSetter)
            .build());

    addParameter(
            DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::output(
                    0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mCodedColorAspects, range).inRange(
                        C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mCodedColorAspects, primaries).inRange(
                        C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mCodedColorAspects, transfer).inRange(
                        C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mCodedColorAspects, matrix).inRange(
                        C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(CodedColorAspectsSetter, mColorAspects)
            .build());

    addParameter(
            DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
            .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
                    0 /* flexCount */, 0u /* stream */))
            .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
                            {C2Config::I_FRAME, C2Config::P_FRAME}),
                         C2F(mPictureQuantization, m.values[0].min).inRange(
                            VPX_QP_DEFAULT_MIN, VPX_QP_DEFAULT_MAX),
                         C2F(mPictureQuantization, m.values[0].max).inRange(
                            VPX_QP_DEFAULT_MIN, VPX_QP_DEFAULT_MAX)})
            .withSetter(PictureQuantizationSetter)
            .build());

}

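// Clamps requested bitrates below 4096 bps up to the supported minimum.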
C2R C2SoftVpxEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (me.v.value < 4096) {
        me.set().value = 4096;
    }
    return res;
}

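// Rejects unsupported dimensions, falling back to the previously configured
// width/height for any field that is out of range.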
C2R C2SoftVpxEnc::IntfImpl::SizeSetter(bool mayBlock,
                                       const C2P<C2StreamPictureSizeInfo::input>& oldMe,
                                       C2P<C2StreamPictureSizeInfo::input>& me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
        me.set().width = oldMe.v.width;
    }
    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
        me.set().height = oldMe.v.height;
    }
    return res;
}

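// For VP9, picks the lowest level whose sample-rate, picture-size, bitrate and
// dimension limits (the kLimits table below) cover the current configuration.
// VP8 has no level signaling, so the level stays pinned to LEVEL_UNUSED.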
C2R C2SoftVpxEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
                                               C2P<C2StreamProfileLevelInfo::output>& me,
                                               const C2P<C2StreamPictureSizeInfo::input>& size,
                                               const C2P<C2StreamFrameRateInfo::output>& frameRate,
                                               const C2P<C2StreamBitrateInfo::output>& bitrate) {
    (void)mayBlock;
#ifdef VP9
    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
        me.set().profile = PROFILE_VP9_0;
    }
    struct LevelLimits {
        C2Config::level_t level;
        float samplesPerSec;
        uint64_t samples;
        uint32_t bitrate;
        size_t dimension;
    };
    constexpr LevelLimits kLimits[] = {
        {LEVEL_VP9_1, 829440, 36864, 200000, 512},
        {LEVEL_VP9_1_1, 2764800, 73728, 800000, 768},
        {LEVEL_VP9_2, 4608000, 122880, 1800000, 960},
        {LEVEL_VP9_2_1, 9216000, 245760, 3600000, 1344},
        {LEVEL_VP9_3, 20736000, 552960, 7200000, 2048},
        {LEVEL_VP9_3_1, 36864000, 983040, 12000000, 2752},
        {LEVEL_VP9_4, 83558400, 2228224, 18000000, 4160},
        {LEVEL_VP9_4_1, 160432128, 2228224, 30000000, 4160},
    };

    uint64_t samples = size.v.width * size.v.height;
    float samplesPerSec = float(samples) * frameRate.v.value;
    size_t dimension = std::max(size.v.width, size.v.height);

    // Check if the supplied level meets the samples / bitrate requirements.
    // If not, update the level with the lowest level meeting the requirements.
    bool found = false;

    // By default needsUpdate = false in case the supplied level does meet
    // the requirements.
    bool needsUpdate = false;
    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
        needsUpdate = true;
    }
    for (const LevelLimits& limit : kLimits) {
        if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
            bitrate.v.value <= limit.bitrate && dimension <= limit.dimension) {
            // This is the lowest level that meets the requirements, and if
            // we haven't seen the supplied level yet, that means we don't
            // need the update.
            if (needsUpdate) {
                ALOGD("Given level %x does not cover current configuration: "
                      "adjusting to %x",
                      me.v.level, limit.level);
                me.set().level = limit.level;
            }
            found = true;
            break;
        }
        if (me.v.level == limit.level) {
            // We break out of the loop when the lowest feasible level is
            // found. The fact that we're here means that our level doesn't
            // meet the requirement and needs to be updated.
            needsUpdate = true;
        }
    }
    if (!found) {
        // We set to the highest supported level.
        me.set().level = LEVEL_VP9_4_1;
    }
#else
    (void)size;
    (void)frameRate;
    (void)bitrate;
    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
        me.set().profile = PROFILE_VP8_0;
    }
    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
        me.set().level = LEVEL_UNUSED;
    }
#endif
    return C2R::Ok();
}

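// Caps the temporal layer count at 4, disallows B-layers, and clamps the
// per-layer bitrate ratios so they are monotonically non-decreasing in [0, 1].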
C2R C2SoftVpxEnc::IntfImpl::LayeringSetter(bool mayBlock,
                                           C2P<C2StreamTemporalLayeringTuning::output>& me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (me.v.m.layerCount > 4) {
        me.set().m.layerCount = 4;
    }
    me.set().m.bLayerCount = 0;
    // ensure ratios are monotonic and clamped between 0 and 1
    for (size_t ix = 0; ix < me.v.flexCount(); ++ix) {
        me.set().m.bitrateRatios[ix] = c2_clamp(
            ix > 0 ? me.v.m.bitrateRatios[ix - 1] : 0, me.v.m.bitrateRatios[ix], 1.);
    }
    ALOGI("setting temporal layering %u + %u", me.v.m.layerCount, me.v.m.bLayerCount);
    return res;
}

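// Converts the sync-frame interval (microseconds) into a key-frame distance in
// frames at the configured frame rate; returns 0 for a negative or unset
// (INT64_MAX) interval.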
uint32_t C2SoftVpxEnc::IntfImpl::getSyncFramePeriod() const {
    if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
        return 0;
    }
    double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
    return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
}

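// Folds the per-picture-type QP bounds into a single [min, max] range, since
// the vpx library applies one quantizer range (rc_min/max_quantizer) to both
// I and P frames.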
C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(bool mayBlock,
                                                      C2P<C2StreamPictureQuantizationTuning::output>
                                                          &me) {
    (void)mayBlock;
    // these are the ones we're going to set, so want them to default
    // to the DEFAULT values for the codec
    int32_t iMin = VPX_QP_DEFAULT_MIN, pMin = VPX_QP_DEFAULT_MIN;
    int32_t iMax = VPX_QP_DEFAULT_MAX, pMax = VPX_QP_DEFAULT_MAX;
    for (size_t i = 0; i < me.v.flexCount(); ++i) {
        const C2PictureQuantizationStruct &layer = me.v.m.values[i];
        // layerMin is clamped to [VPX_QP_MIN, layerMax] to avoid error
        // cases where layer.min > layer.max
        int32_t layerMax = std::clamp(layer.max, VPX_QP_MIN, VPX_QP_MAX);
        int32_t layerMin = std::clamp(layer.min, VPX_QP_MIN, layerMax);
        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
            iMax = layerMax;
            iMin = layerMin;
            ALOGV("iMin %d iMax %d", iMin, iMax);
        } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
            pMax = layerMax;
            pMin = layerMin;
            ALOGV("pMin %d pMax %d", pMin, pMax);
        }
    }
    ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d",
          iMin, iMax, pMin, pMax);

    // vpx library takes same range for I/P picture type
    int32_t maxFrameQP = std::min({iMax, pMax});
    int32_t minFrameQP = std::max({iMin, pMin});
    if (minFrameQP > maxFrameQP) {
        minFrameQP = maxFrameQP;
    }
    // put them back into the structure
    for (size_t i = 0; i < me.v.flexCount(); ++i) {
        const C2PictureQuantizationStruct &layer = me.v.m.values[i];

        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
            me.set().m.values[i].max = maxFrameQP;
            me.set().m.values[i].min = minFrameQP;
        }
        else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
            me.set().m.values[i].max = maxFrameQP;
            me.set().m.values[i].min = minFrameQP;
        }
    }
    ALOGV("PictureQuantizationSetter(exit): minFrameQP = %d maxFrameQP = %d",
          minFrameQP, maxFrameQP);
    return C2R::Ok();
}

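// Clamps out-of-range color aspect values to the *_OTHER sentinels; the coded
// (VUI) aspects are then derived by copying whatever the input aspects are.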
C2R C2SoftVpxEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
                                               C2P<C2StreamColorAspectsInfo::input>& me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
        me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
        me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
        me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
        me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
}
C2R C2SoftVpxEnc::IntfImpl::CodedColorAspectsSetter(
        bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
        const C2P<C2StreamColorAspectsInfo::input>& coded) {
    (void)mayBlock;
    me.set().range = coded.v.range;
    me.set().primaries = coded.v.primaries;
    me.set().transfer = coded.v.transfer;
    me.set().matrix = coded.v.matrix;
    return C2R::Ok();
}

#if 0
static size_t getCpuCoreCount() {
    long cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
#else
    // _SC_NPROC_ONLN must be defined...
    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
    CHECK(cpuCoreCount >= 1);
    ALOGV("Number of CPU cores: %ld", cpuCoreCount);
    return (size_t)cpuCoreCount;
}
#endif

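// Component construction only sets default member state; the libvpx encoder
// context is created in initEncoder(), called from onInit() and, if still
// missing, lazily from process().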
C2SoftVpxEnc::C2SoftVpxEnc(const char* name, c2_node_id_t id,
                           const std::shared_ptr<IntfImpl>& intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mCodecContext(nullptr),
      mCodecConfiguration(nullptr),
      mCodecInterface(nullptr),
      mStrideAlign(2),
      mColorFormat(VPX_IMG_FMT_I420),
      mBitrateControlMode(VPX_VBR),
      mErrorResilience(false),
      mMinQuantizer(0),
      mMaxQuantizer(0),
      mTemporalLayers(0),
      mTemporalPatternType(VPXTemporalLayerPatternNone),
      mTemporalPatternLength(0),
      mTemporalPatternIdx(0),
      mLastTimestamp(0x7FFFFFFFFFFFFFFFull),
      mSignalledOutputEos(false),
      mSignalledError(false) {
    for (int i = 0; i < MAXTEMPORALLAYERS; i++) {
        mTemporalLayerBitrateRatio[i] = 1.0f;
    }
}

C2SoftVpxEnc::~C2SoftVpxEnc() {
    onRelease();
}

c2_status_t C2SoftVpxEnc::onInit() {
    status_t err = initEncoder();
    return err == OK ? C2_OK : C2_CORRUPTED;
}

void C2SoftVpxEnc::onRelease() {
    if (mCodecContext) {
        vpx_codec_destroy(mCodecContext);
        delete mCodecContext;
        mCodecContext = nullptr;
    }

    if (mCodecConfiguration) {
        delete mCodecConfiguration;
        mCodecConfiguration = nullptr;
    }

    // this one is not allocated by us
    mCodecInterface = nullptr;
}

c2_status_t C2SoftVpxEnc::onStop() {
    onRelease();
    mLastTimestamp = 0x7FFFFFFFFFFFFFFFLL;
    mSignalledOutputEos = false;
    mSignalledError = false;
    return C2_OK;
}

void C2SoftVpxEnc::onReset() {
    (void)onStop();
}

c2_status_t C2SoftVpxEnc::onFlush_sm() {
    return onStop();
}

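// Reads the configured parameters under the interface lock, maps them onto a
// vpx_codec_enc_cfg_t (rate control, microsecond timebase, temporal-layer
// pattern, key-frame distance, QP bounds), initializes the libvpx context,
// applies CBR-specific controls and allocates the I420 conversion buffer.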
status_t C2SoftVpxEnc::initEncoder() {
    vpx_codec_err_t codec_return;
    status_t result = UNKNOWN_ERROR;
    {
        IntfImpl::Lock lock = mIntf->lock();
        mSize = mIntf->getSize_l();
        mBitrate = mIntf->getBitrate_l();
        mBitrateMode = mIntf->getBitrateMode_l();
        mFrameRate = mIntf->getFrameRate_l();
        mIntraRefresh = mIntf->getIntraRefresh_l();
        mRequestSync = mIntf->getRequestSync_l();
        mLayering = mIntf->getTemporalLayers_l();
        mTemporalLayers = mLayering->m.layerCount;
        mQpBounds = mIntf->getPictureQuantization_l();
    }

    switch (mBitrateMode->value) {
        case C2Config::BITRATE_CONST:
            mBitrateControlMode = VPX_CBR;
            break;
        case C2Config::BITRATE_VARIABLE:
            [[fallthrough]];
        default:
            mBitrateControlMode = VPX_VBR;
            break;
    }

    if (mQpBounds->flexCount() > 0) {
        // read min max qp for sequence
        for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
            const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
            if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
                mMaxQuantizer = layer.max;
                mMinQuantizer = layer.min;
                break;
            }
        }
    }

    setCodecSpecificInterface();
    if (!mCodecInterface) goto CleanUp;

    ALOGD("VPx: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u",
          (uint32_t)mBitrateControlMode, mTemporalLayers, mIntf->getSyncFramePeriod(),
          mMinQuantizer, mMaxQuantizer);

    mCodecConfiguration = new vpx_codec_enc_cfg_t;
    if (!mCodecConfiguration) goto CleanUp;
    codec_return = vpx_codec_enc_config_default(mCodecInterface,
                                                mCodecConfiguration,
                                                0);
    if (codec_return != VPX_CODEC_OK) {
        ALOGE("Error populating default configuration for vpx encoder.");
        goto CleanUp;
    }

    mCodecConfiguration->g_w = mSize->width;
    mCodecConfiguration->g_h = mSize->height;
    //mCodecConfiguration->g_threads = getCpuCoreCount();
    mCodecConfiguration->g_threads = 0;
    mCodecConfiguration->g_error_resilient = mErrorResilience;

    // timebase unit is microsecond
    // g_timebase is in seconds (i.e. 1/1000000 seconds)
    mCodecConfiguration->g_timebase.num = 1;
    mCodecConfiguration->g_timebase.den = 1000000;
    // rc_target_bitrate is in kbps, mBitrate in bps
    mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
    mCodecConfiguration->rc_end_usage = mBitrateControlMode;
    // Disable frame drop - not allowed in MediaCodec now.
    mCodecConfiguration->rc_dropframe_thresh = 0;
    // Disable lagged encoding.
    mCodecConfiguration->g_lag_in_frames = 0;
    if (mBitrateControlMode == VPX_CBR) {
        // Disable spatial resizing.
        mCodecConfiguration->rc_resize_allowed = 0;
        // Single-pass mode.
        mCodecConfiguration->g_pass = VPX_RC_ONE_PASS;
        // Maximum amount of bits that can be subtracted from the target
        // bitrate - expressed as percentage of the target bitrate.
        mCodecConfiguration->rc_undershoot_pct = 100;
        // Maximum amount of bits that can be added to the target
        // bitrate - expressed as percentage of the target bitrate.
        mCodecConfiguration->rc_overshoot_pct = 15;
        // Initial value of the buffer level in ms.
        mCodecConfiguration->rc_buf_initial_sz = 500;
        // Amount of data that the encoder should try to maintain in ms.
        mCodecConfiguration->rc_buf_optimal_sz = 600;
        // The amount of data that may be buffered by the decoding
        // application in ms.
        mCodecConfiguration->rc_buf_sz = 1000;
        // Enable error resilience - needed for packet loss.
        mCodecConfiguration->g_error_resilient = 1;
        // Maximum key frame interval - for CBR boost to 3000
        mCodecConfiguration->kf_max_dist = 3000;
        // Encoder determines optimal key frame placement automatically.
        mCodecConfiguration->kf_mode = VPX_KF_AUTO;
    }

    // Frames temporal pattern - for now WebRTC like pattern is only supported.
    switch (mTemporalLayers) {
        case 0:
            mTemporalPatternLength = 0;
            break;
        case 1:
            mCodecConfiguration->ts_number_layers = 1;
            mCodecConfiguration->ts_rate_decimator[0] = 1;
            mCodecConfiguration->ts_periodicity = 1;
            mCodecConfiguration->ts_layer_id[0] = 0;
            mTemporalPattern[0] = kTemporalUpdateLastRefAll;
            mTemporalPatternLength = 1;
            break;
        case 2:
            mCodecConfiguration->ts_number_layers = 2;
            mCodecConfiguration->ts_rate_decimator[0] = 2;
            mCodecConfiguration->ts_rate_decimator[1] = 1;
            mCodecConfiguration->ts_periodicity = 2;
            mCodecConfiguration->ts_layer_id[0] = 0;
            mCodecConfiguration->ts_layer_id[1] = 1;
            mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
            mTemporalPattern[1] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
            mTemporalPattern[2] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[3] = kTemporalUpdateGoldenRefAltRef;
            mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[5] = kTemporalUpdateGoldenRefAltRef;
            mTemporalPattern[6] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[7] = kTemporalUpdateNone;
            mTemporalLayerBitrateRatio[0] = mLayering->m.bitrateRatios[0];
            mTemporalPatternLength = 8;
            break;
        case 3:
            mCodecConfiguration->ts_number_layers = 3;
            mCodecConfiguration->ts_rate_decimator[0] = 4;
            mCodecConfiguration->ts_rate_decimator[1] = 2;
            mCodecConfiguration->ts_rate_decimator[2] = 1;
            mCodecConfiguration->ts_periodicity = 4;
            mCodecConfiguration->ts_layer_id[0] = 0;
            mCodecConfiguration->ts_layer_id[1] = 2;
            mCodecConfiguration->ts_layer_id[2] = 1;
            mCodecConfiguration->ts_layer_id[3] = 2;
            mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
            mTemporalPattern[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef;
            mTemporalPattern[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
            mTemporalPattern[3] = kTemporalUpdateNone;
            mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[5] = kTemporalUpdateNone;
            mTemporalPattern[6] = kTemporalUpdateGoldenRefAltRef;
            mTemporalPattern[7] = kTemporalUpdateNone;
            mTemporalLayerBitrateRatio[0] = mLayering->m.bitrateRatios[0];
            mTemporalLayerBitrateRatio[1] = mLayering->m.bitrateRatios[1];
            mTemporalPatternLength = 8;
            break;
        default:
            ALOGE("Wrong number of temporal layers %zu", mTemporalLayers);
            goto CleanUp;
    }
    // Set bitrate values for each layer
    for (size_t i = 0; i < mCodecConfiguration->ts_number_layers; i++) {
        mCodecConfiguration->ts_target_bitrate[i] =
            mCodecConfiguration->rc_target_bitrate *
            mTemporalLayerBitrateRatio[i];
    }
    if (mIntf->getSyncFramePeriod() >= 0) {
        mCodecConfiguration->kf_max_dist = mIntf->getSyncFramePeriod();
        mCodecConfiguration->kf_min_dist = mIntf->getSyncFramePeriod();
        mCodecConfiguration->kf_mode = VPX_KF_AUTO;
    }
    if (mMinQuantizer > 0) {
        mCodecConfiguration->rc_min_quantizer = mMinQuantizer;
    }
    if (mMaxQuantizer > 0) {
        mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
    }
    setCodecSpecificConfiguration();
    mCodecContext = new vpx_codec_ctx_t;
    if (!mCodecContext) goto CleanUp;
    codec_return = vpx_codec_enc_init(mCodecContext,
                                      mCodecInterface,
                                      mCodecConfiguration,
                                      0); // flags
    if (codec_return != VPX_CODEC_OK) {
        ALOGE("Error initializing vpx encoder");
        goto CleanUp;
    }

    // Extra CBR settings
    if (mBitrateControlMode == VPX_CBR) {
        codec_return = vpx_codec_control(mCodecContext,
                                         VP8E_SET_STATIC_THRESHOLD,
                                         1);
        if (codec_return == VPX_CODEC_OK) {
            uint32_t rc_max_intra_target =
                (uint32_t)(mCodecConfiguration->rc_buf_optimal_sz * mFrameRate->value / 20 + 0.5);
            // Don't go below 3 times per frame bandwidth.
            if (rc_max_intra_target < 300) {
                rc_max_intra_target = 300;
            }
            codec_return = vpx_codec_control(mCodecContext,
                                             VP8E_SET_MAX_INTRA_BITRATE_PCT,
                                             rc_max_intra_target);
        }
        if (codec_return == VPX_CODEC_OK) {
            codec_return = vpx_codec_control(mCodecContext,
                                             VP8E_SET_CPUUSED,
                                             -8);
        }
        if (codec_return != VPX_CODEC_OK) {
            ALOGE("Error setting cbr parameters for vpx encoder.");
            goto CleanUp;
        }
    }

    codec_return = setCodecSpecificControls();
    if (codec_return != VPX_CODEC_OK) goto CleanUp;

    {
        uint32_t width = mSize->width;
        uint32_t height = mSize->height;
        if (((uint64_t)width * height) >
            ((uint64_t)INT32_MAX / 3)) {
            ALOGE("b/25812794, Buffer size is too big, width=%u, height=%u.", width, height);
        } else {
            uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
            uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
            mConversionBuffer = MemoryBlock::Allocate(stride * vstride * 3 / 2);
            if (!mConversionBuffer.size()) {
                ALOGE("Allocating conversion buffer failed.");
            } else {
                mNumInputFrames = -1;
                return OK;
            }
        }
    }

CleanUp:
    onRelease();
    return result;
}

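// Derives the per-frame VP8 reference/update flags from the WebRTC-style
// temporal pattern selected in initEncoder(), advancing the pattern index.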
vpx_enc_frame_flags_t C2SoftVpxEnc::getEncodeFlags() {
    vpx_enc_frame_flags_t flags = 0;
    if (mTemporalPatternLength > 0) {
        int patternIdx = mTemporalPatternIdx % mTemporalPatternLength;
        mTemporalPatternIdx++;
        switch (mTemporalPattern[patternIdx]) {
            case kTemporalUpdateLast:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                flags |= VP8_EFLAG_NO_REF_ARF;
                break;
            case kTemporalUpdateGoldenWithoutDependency:
                flags |= VP8_EFLAG_NO_REF_GF;
                [[fallthrough]];
            case kTemporalUpdateGolden:
                flags |= VP8_EFLAG_NO_REF_ARF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateAltrefWithoutDependency:
                flags |= VP8_EFLAG_NO_REF_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                [[fallthrough]];
            case kTemporalUpdateAltref:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateNoneNoRefAltref:
                flags |= VP8_EFLAG_NO_REF_ARF;
                [[fallthrough]];
            case kTemporalUpdateNone:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                flags |= VP8_EFLAG_NO_UPD_ENTROPY;
                break;
            case kTemporalUpdateNoneNoRefGoldenRefAltRef:
                flags |= VP8_EFLAG_NO_REF_GF;
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                flags |= VP8_EFLAG_NO_UPD_ENTROPY;
                break;
            case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
                flags |= VP8_EFLAG_NO_REF_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateLastRefAltRef:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                break;
            case kTemporalUpdateGoldenRefAltRef:
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateLastAndGoldenRefAltRef:
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                break;
            case kTemporalUpdateLastRefAll:
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_GF;
                break;
        }
    }
    return flags;
}

// TODO: add support for YUV input color formats
// TODO: add support for SVC, ARF. SVC and ARF return multiple frames
// (hierarchical / noshow) in one call. These frames should be combined into
// a single buffer and sent back to the client.
void C2SoftVpxEnc::process(
        const std::unique_ptr<C2Work> &work,
        const std::shared_ptr<C2BlockPool> &pool) {
    // Initialize output work
    work->result = C2_OK;
    work->workletsProcessed = 1u;
    work->worklets.front()->output.flags = work->input.flags;

    if (mSignalledError || mSignalledOutputEos) {
        work->result = C2_BAD_VALUE;
        return;
    }
    // Initialize encoder if not already
    if (!mCodecContext && OK != initEncoder()) {
        ALOGE("Failed to initialize encoder");
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        return;
    }

    std::shared_ptr<C2GraphicView> rView;
    std::shared_ptr<C2Buffer> inputBuffer;
    if (!work->input.buffers.empty()) {
        inputBuffer = work->input.buffers[0];
        rView = std::make_shared<C2GraphicView>(
                inputBuffer->data().graphicBlocks().front().map().get());
        if (rView->error() != C2_OK) {
            ALOGE("graphic view map err = %d", rView->error());
            work->result = C2_CORRUPTED;
            return;
        }
        //(b/232396154)
        //workaround for incorrect crop size in view when using surface mode
        rView->setCrop_be(C2Rect(mSize->width, mSize->height));
    } else {
        ALOGV("Empty input Buffer");
        uint32_t flags = 0;
        if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
            flags |= C2FrameData::FLAG_END_OF_STREAM;
        }
        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
        work->worklets.front()->output.buffers.clear();
        work->worklets.front()->output.ordinal = work->input.ordinal;
        work->workletsProcessed = 1u;
        return;
    }

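    // Validate the input dimensions, then wrap (or convert) the graphic buffer
    // into an I420 vpx_image_t for libvpx: RGB(A) input is color-converted and
    // YUV input that is not already I420-compatible is copied into the
    // conversion buffer.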
    const C2ConstGraphicBlock inBuffer =
        inputBuffer->data().graphicBlocks().front();
    if (inBuffer.width() < mSize->width ||
        inBuffer.height() < mSize->height) {
        ALOGE("unexpected Input buffer attributes %d(%d) x %d(%d)",
              inBuffer.width(), mSize->width, inBuffer.height(),
              mSize->height);
        mSignalledError = true;
        work->result = C2_BAD_VALUE;
        return;
    }
    bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
    vpx_image_t raw_frame;
    const C2PlanarLayout &layout = rView->layout();
    uint32_t width = mSize->width;
    uint32_t height = mSize->height;
    if (width > 0x8000 || height > 0x8000) {
        ALOGE("Image too big: %u x %u", width, height);
        work->result = C2_BAD_VALUE;
        return;
    }
    uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
    uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
    switch (layout.type) {
        case C2PlanarLayout::TYPE_RGB:
        case C2PlanarLayout::TYPE_RGBA: {
            std::shared_ptr<C2StreamColorAspectsInfo::output> colorAspects;
            {
                IntfImpl::Lock lock = mIntf->lock();
                colorAspects = mIntf->getCodedColorAspects_l();
            }
            ConvertRGBToPlanarYUV(mConversionBuffer.data(), stride, vstride,
                                  mConversionBuffer.size(), *rView.get(),
                                  colorAspects->matrix, colorAspects->range);
            vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
                         mStrideAlign, mConversionBuffer.data());
            break;
        }
        case C2PlanarLayout::TYPE_YUV: {
            if (!IsYUV420(*rView)) {
                ALOGE("input is not YUV420");
                work->result = C2_BAD_VALUE;
                return;
            }

            if (layout.planes[layout.PLANE_Y].colInc == 1
                    && layout.planes[layout.PLANE_U].colInc == 1
                    && layout.planes[layout.PLANE_V].colInc == 1) {
                // I420 compatible - though with custom offset and stride
                vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
                             mStrideAlign, (uint8_t*)rView->data()[0]);
                raw_frame.planes[1] = (uint8_t*)rView->data()[1];
                raw_frame.planes[2] = (uint8_t*)rView->data()[2];
                raw_frame.stride[0] = layout.planes[layout.PLANE_Y].rowInc;
                raw_frame.stride[1] = layout.planes[layout.PLANE_U].rowInc;
                raw_frame.stride[2] = layout.planes[layout.PLANE_V].rowInc;
            } else {
                // copy to I420
                MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, stride, vstride);
                if (mConversionBuffer.size() >= stride * vstride * 3 / 2) {
                    status_t err = ImageCopy(mConversionBuffer.data(), &img, *rView);
                    if (err != OK) {
                        ALOGE("Buffer conversion failed: %d", err);
                        work->result = C2_BAD_VALUE;
                        return;
                    }
                    vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, stride, vstride,
                                 mStrideAlign, mConversionBuffer.data());
                    vpx_img_set_rect(&raw_frame, 0, 0, width, height);
                } else {
                    ALOGE("Conversion buffer is too small: %u x %u for %zu",
                          stride, vstride, mConversionBuffer.size());
                    work->result = C2_BAD_VALUE;
                    return;
                }
            }
            break;
        }
        default:
            ALOGE("Unrecognized plane type: %d", layout.type);
            work->result = C2_BAD_VALUE;
            return;
    }

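    // Fetch the per-frame reference/update flags for the temporal pattern;
    // a pending sync request below adds VPX_EFLAG_FORCE_KF on top of them.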
    vpx_enc_frame_flags_t flags = getEncodeFlags();
    // handle dynamic config parameters
    {
        IntfImpl::Lock lock = mIntf->lock();
        std::shared_ptr<C2StreamIntraRefreshTuning::output> intraRefresh = mIntf->getIntraRefresh_l();
        std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
        std::shared_ptr<C2StreamRequestSyncFrameTuning::output> requestSync = mIntf->getRequestSync_l();
        lock.unlock();

        if (intraRefresh != mIntraRefresh) {
            mIntraRefresh = intraRefresh;
            ALOGV("Got mIntraRefresh request");
        }

        if (requestSync != mRequestSync) {
            // we can handle IDR immediately
            if (requestSync->value) {
                // unset request
                C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE);
                std::vector<std::unique_ptr<C2SettingResult>> failures;
                mIntf->config({ &clearSync }, C2_MAY_BLOCK, &failures);
                ALOGV("Got sync request");
                flags |= VPX_EFLAG_FORCE_KF;
            }
            mRequestSync = requestSync;
        }

        if (bitrate != mBitrate) {
            mBitrate = bitrate;
            mCodecConfiguration->rc_target_bitrate =
                (mBitrate->value + 500) / 1000;
            vpx_codec_err_t res = vpx_codec_enc_config_set(mCodecContext,
                                                           mCodecConfiguration);
            if (res != VPX_CODEC_OK) {
                ALOGE("vpx encoder failed to update bitrate: %s",
                      vpx_codec_err_to_string(res));
                mSignalledError = true;
                work->result = C2_CORRUPTED;
                return;
            }
        }
    }

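    // libvpx expects the frame duration in timebase units (microseconds here);
    // derive it from consecutive input timestamps, falling back to the
    // configured frame rate (or 30 fps) for the first frame or non-monotonic
    // timestamps.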
    uint64_t inputTimeStamp = work->input.ordinal.timestamp.peekull();
    uint32_t frameDuration;
    if (inputTimeStamp > mLastTimestamp) {
        frameDuration = (uint32_t)(inputTimeStamp - mLastTimestamp);
    } else {
        // Use default of 30 fps in case of 0 frame rate.
        float frameRate = mFrameRate->value;
        if (frameRate < 0.001) {
            frameRate = 30;
        }
        frameDuration = (uint32_t)(1000000 / frameRate + 0.5);
    }
    mLastTimestamp = inputTimeStamp;

    vpx_codec_err_t codec_return = vpx_codec_encode(mCodecContext, &raw_frame,
                                                    inputTimeStamp,
                                                    frameDuration, flags,
                                                    VPX_DL_REALTIME);
    if (codec_return != VPX_CODEC_OK) {
        ALOGE("vpx encoder failed to encode frame");
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        return;
    }

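    // Drain all encoded packets for this input; each frame packet is copied
    // into a linear block, key frames are tagged as SYNC_FRAME, and the result
    // is attached to the work item.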
    bool populated = false;
    vpx_codec_iter_t encoded_packet_iterator = nullptr;
    const vpx_codec_cx_pkt_t* encoded_packet;
    while ((encoded_packet = vpx_codec_get_cx_data(
                    mCodecContext, &encoded_packet_iterator))) {
        if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) {
            std::shared_ptr<C2LinearBlock> block;
            C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
            c2_status_t err = pool->fetchLinearBlock(encoded_packet->data.frame.sz, usage, &block);
            if (err != C2_OK) {
                ALOGE("fetchLinearBlock for Output failed with status %d", err);
                work->result = C2_NO_MEMORY;
                return;
            }
            C2WriteView wView = block->map().get();
            if (wView.error()) {
                ALOGE("write view map failed %d", wView.error());
                work->result = C2_CORRUPTED;
                return;
            }

            memcpy(wView.data(), encoded_packet->data.frame.buf, encoded_packet->data.frame.sz);
            ++mNumInputFrames;

            ALOGD("bytes generated %zu", encoded_packet->data.frame.sz);
            uint32_t flags = 0;
            if (eos) {
                flags |= C2FrameData::FLAG_END_OF_STREAM;
            }
            work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
            work->worklets.front()->output.buffers.clear();
            std::shared_ptr<C2Buffer> buffer =
                createLinearBuffer(block, 0, encoded_packet->data.frame.sz);
            if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY) {
                buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
                        0u /* stream id */, C2Config::SYNC_FRAME));
            }
            work->worklets.front()->output.buffers.push_back(buffer);
            work->worklets.front()->output.ordinal = work->input.ordinal;
            work->worklets.front()->output.ordinal.timestamp = encoded_packet->data.frame.pts;
            work->workletsProcessed = 1u;
            populated = true;
            if (eos) {
                mSignalledOutputEos = true;
                ALOGV("signalled EOS");
            }
        }
    }
    if (!populated) {
        work->workletsProcessed = 0u;
    }
}

c2_status_t C2SoftVpxEnc::drain(
        uint32_t drainMode,
        const std::shared_ptr<C2BlockPool> &pool) {
    (void)pool;
    if (drainMode == NO_DRAIN) {
        ALOGW("drain with NO_DRAIN: no-op");
        return C2_OK;
    }
    if (drainMode == DRAIN_CHAIN) {
        ALOGW("DRAIN_CHAIN not supported");
        return C2_OMITTED;
    }

    return C2_OK;
}

} // namespace android