/*
 * Copyright 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftVpxEnc"
#include <log/log.h>
#include <utils/misc.h>

#include <media/hardware/VideoAPI.h>

#include <Codec2BufferUtils.h>
#include <Codec2CommonUtils.h>
#include <C2Debug.h>
#include "C2SoftVpxEnc.h"

#ifndef INT32_MAX
#define INT32_MAX 2147483647
#endif

/* Quantization param values defined by the spec */
#define VPX_QP_MIN 0
#define VPX_QP_MAX 63
#define VPX_QP_DEFAULT_MIN VPX_QP_MIN
#define VPX_QP_DEFAULT_MAX VPX_QP_MAX

namespace android {

C2SoftVpxEnc::IntfImpl::IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
    : SimpleInterface<void>::BaseParams(
            helper,
            COMPONENT_NAME,
            C2Component::KIND_ENCODER,
            C2Component::DOMAIN_VIDEO,
            MEDIA_MIMETYPE_VIDEO) {
    noPrivateBuffers(); // TODO: account for our buffers here
    noInputReferences();
    noOutputReferences();
    noInputLatency();
    noTimeStretch();
    setDerivedInstance(this);

    addParameter(
            DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
            .withConstValue(new C2ComponentAttributesSetting(
                C2Component::ATTRIB_IS_TEMPORAL))
            .build());

    addParameter(
            DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
            .withConstValue(new C2StreamUsageTuning::input(
                    0u, (uint64_t)C2MemoryUsage::CPU_READ))
            .build());

    // Odd dimension support in encoders requires Android V and above
    size_t stepSize = isAtLeastV() ? 1 : 2;
    addParameter(
            DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
            .withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
            .withFields({
                C2F(mSize, width).inRange(2, 2048, stepSize),
                C2F(mSize, height).inRange(2, 2048, stepSize),
            })
            .withSetter(SizeSetter)
            .build());

    addParameter(
            DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
            .withDefault(new C2StreamBitrateModeTuning::output(
                    0u, C2Config::BITRATE_VARIABLE))
            .withFields({
                C2F(mBitrateMode, value).oneOf({
                    C2Config::BITRATE_CONST, C2Config::BITRATE_VARIABLE })
            })
            .withSetter(
                Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
            .withDefault(new C2StreamFrameRateInfo::output(0u, 1.))
            // TODO: More restriction?
            .withFields({C2F(mFrameRate, value).greaterThan(0.)})
            .withSetter(
                Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mLayering, C2_PARAMKEY_TEMPORAL_LAYERING)
            .withDefault(C2StreamTemporalLayeringTuning::output::AllocShared(0u, 0, 0, 0))
            .withFields({
                C2F(mLayering, m.layerCount).inRange(0, 4),
                C2F(mLayering, m.bLayerCount).inRange(0, 0),
                C2F(mLayering, m.bitrateRatios).inRange(0., 1.)
            })
            .withSetter(LayeringSetter)
            .build());

    addParameter(
            DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
            .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
            .withFields({C2F(mSyncFramePeriod, value).any()})
            .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
            .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
            .withFields({C2F(mBitrate, value).inRange(4096, 40000000)})
            .withSetter(BitrateSetter)
            .build());

    addParameter(
            DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
            .withConstValue(new C2StreamIntraRefreshTuning::output(
                    0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
            .build());
#ifdef VP9
    addParameter(
            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
            .withDefault(new C2StreamProfileLevelInfo::output(
                    0u, PROFILE_VP9_0, LEVEL_VP9_4_1))
            .withFields({
                C2F(mProfileLevel, profile).equalTo(
                    PROFILE_VP9_0
                ),
                C2F(mProfileLevel, level).oneOf({
                    C2Config::LEVEL_VP9_1,
                    C2Config::LEVEL_VP9_1_1,
                    C2Config::LEVEL_VP9_2,
                    C2Config::LEVEL_VP9_2_1,
                    C2Config::LEVEL_VP9_3,
                    C2Config::LEVEL_VP9_3_1,
                    C2Config::LEVEL_VP9_4,
                    C2Config::LEVEL_VP9_4_1,
                }),
            })
            .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
            .build());
#else
    addParameter(
            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
            .withDefault(new C2StreamProfileLevelInfo::output(
                    0u, PROFILE_VP8_0, LEVEL_UNUSED))
            .withFields({
                C2F(mProfileLevel, profile).equalTo(
                    PROFILE_VP8_0
                ),
                C2F(mProfileLevel, level).equalTo(
                    LEVEL_UNUSED),
            })
            .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
            .build());
#endif
    addParameter(
            DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
            .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
            .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
            .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
            .build());

    addParameter(
            DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::input(
                    0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mColorAspects, range).inRange(
                        C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mColorAspects, primaries).inRange(
                        C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mColorAspects, transfer).inRange(
                        C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mColorAspects, matrix).inRange(
                        C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(ColorAspectsSetter)
            .build());

    addParameter(
            DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::output(
                    0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mCodedColorAspects, range).inRange(
                        C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mCodedColorAspects, primaries).inRange(
                        C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mCodedColorAspects, transfer).inRange(
                        C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mCodedColorAspects, matrix).inRange(
                        C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(CodedColorAspectsSetter, mColorAspects)
            .build());

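    // Picture quantization bounds are exposed per picture type (I/P). libvpx
    // applies a single QP range per stream, so the setter below merges the
    // I- and P-frame bounds into one [min, max] range within the spec limits.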
    addParameter(
            DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
            .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
                    0 /* flexCount */, 0u /* stream */))
            .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
                            {C2Config::I_FRAME, C2Config::P_FRAME}),
                         C2F(mPictureQuantization, m.values[0].min).inRange(
                            VPX_QP_DEFAULT_MIN, VPX_QP_DEFAULT_MAX),
                         C2F(mPictureQuantization, m.values[0].max).inRange(
                            VPX_QP_DEFAULT_MIN, VPX_QP_DEFAULT_MAX)})
            .withSetter(PictureQuantizationSetter)
            .build());

}

C2R C2SoftVpxEnc::IntfImpl::BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (me.v.value < 4096) {
        me.set().value = 4096;
    }
    return res;
}

C2R C2SoftVpxEnc::IntfImpl::SizeSetter(bool mayBlock,
                                       const C2P<C2StreamPictureSizeInfo::input>& oldMe,
                                       C2P<C2StreamPictureSizeInfo::input>& me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
        me.set().width = oldMe.v.width;
    }
    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
        res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
        me.set().height = oldMe.v.height;
    }
    return res;
}

C2R C2SoftVpxEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
                                               C2P<C2StreamProfileLevelInfo::output>& me,
                                               const C2P<C2StreamPictureSizeInfo::input>& size,
                                               const C2P<C2StreamFrameRateInfo::output>& frameRate,
                                               const C2P<C2StreamBitrateInfo::output>& bitrate) {
    (void)mayBlock;
#ifdef VP9
    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
        me.set().profile = PROFILE_VP9_0;
    }
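    // Per-level constraints derived from the VP9 specification: maximum luma
    // sample rate (samples/sec), maximum picture size (samples), maximum
    // average bitrate (bps) and maximum dimension for each supported level.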
    struct LevelLimits {
        C2Config::level_t level;
        float samplesPerSec;
        uint64_t samples;
        uint32_t bitrate;
        size_t dimension;
    };
    constexpr LevelLimits kLimits[] = {
        {LEVEL_VP9_1, 829440, 36864, 200000, 512},
        {LEVEL_VP9_1_1, 2764800, 73728, 800000, 768},
        {LEVEL_VP9_2, 4608000, 122880, 1800000, 960},
        {LEVEL_VP9_2_1, 9216000, 245760, 3600000, 1344},
        {LEVEL_VP9_3, 20736000, 552960, 7200000, 2048},
        {LEVEL_VP9_3_1, 36864000, 983040, 12000000, 2752},
        {LEVEL_VP9_4, 83558400, 2228224, 18000000, 4160},
        {LEVEL_VP9_4_1, 160432128, 2228224, 30000000, 4160},
    };

    uint64_t samples = size.v.width * size.v.height;
    float samplesPerSec = float(samples) * frameRate.v.value;
    size_t dimension = std::max(size.v.width, size.v.height);

    // Check if the supplied level meets the samples / bitrate requirements.
    // If not, update the level with the lowest level meeting the requirements.
    bool found = false;

    // By default needsUpdate = false in case the supplied level does meet
    // the requirements.
    bool needsUpdate = false;
    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
        needsUpdate = true;
    }
    for (const LevelLimits& limit : kLimits) {
        if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
            bitrate.v.value <= limit.bitrate && dimension <= limit.dimension) {
            // This is the lowest level that meets the requirements, and if
            // we haven't seen the supplied level yet, that means we don't
            // need the update.
            if (needsUpdate) {
                ALOGD("Given level %x does not cover current configuration: "
                      "adjusting to %x",
                      me.v.level, limit.level);
                me.set().level = limit.level;
            }
            found = true;
            break;
        }
        if (me.v.level == limit.level) {
            // We break out of the loop when the lowest feasible level is
            // found. The fact that we're here means that our level doesn't
            // meet the requirement and needs to be updated.
            needsUpdate = true;
        }
    }
    if (!found) {
        // We set to the highest supported level.
        me.set().level = LEVEL_VP9_4_1;
    }
#else
    (void)size;
    (void)frameRate;
    (void)bitrate;
    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
        me.set().profile = PROFILE_VP8_0;
    }
    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
        me.set().level = LEVEL_UNUSED;
    }
#endif
    return C2R::Ok();
}

C2R C2SoftVpxEnc::IntfImpl::LayeringSetter(bool mayBlock,
                                           C2P<C2StreamTemporalLayeringTuning::output>& me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (me.v.m.layerCount > 4) {
        me.set().m.layerCount = 4;
    }
    me.set().m.bLayerCount = 0;
    // ensure ratios are monotonic and clamped between 0 and 1
    for (size_t ix = 0; ix < me.v.flexCount(); ++ix) {
        me.set().m.bitrateRatios[ix] = c2_clamp(
            ix > 0 ? me.v.m.bitrateRatios[ix - 1] : 0, me.v.m.bitrateRatios[ix], 1.);
    }
    ALOGI("setting temporal layering %u + %u", me.v.m.layerCount, me.v.m.bLayerCount);
    return res;
}

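// Convert the configured sync-frame interval (in microseconds) into a key
// frame distance expressed in frames, rounded to the nearest frame and
// clamped to at least 1. A negative or unset interval yields 0.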
uint32_t C2SoftVpxEnc::IntfImpl::getSyncFramePeriod() const {
    if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
        return 0;
    }
    double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
    return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
}

C2R C2SoftVpxEnc::IntfImpl::PictureQuantizationSetter(bool mayBlock,
                                                      C2P<C2StreamPictureQuantizationTuning::output>
                                                          &me) {
    (void)mayBlock;
    // these are the ones we're going to set, so want them to default
    // to the DEFAULT values for the codec
    int32_t iMin = VPX_QP_DEFAULT_MIN, pMin = VPX_QP_DEFAULT_MIN;
    int32_t iMax = VPX_QP_DEFAULT_MAX, pMax = VPX_QP_DEFAULT_MAX;
    for (size_t i = 0; i < me.v.flexCount(); ++i) {
        const C2PictureQuantizationStruct &layer = me.v.m.values[i];
        // layerMin is clamped to [VPX_QP_MIN, layerMax] to avoid error
        // cases where layer.min > layer.max
        int32_t layerMax = std::clamp(layer.max, VPX_QP_MIN, VPX_QP_MAX);
        int32_t layerMin = std::clamp(layer.min, VPX_QP_MIN, layerMax);
        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
            iMax = layerMax;
            iMin = layerMin;
            ALOGV("iMin %d iMax %d", iMin, iMax);
        } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
            pMax = layerMax;
            pMin = layerMin;
            ALOGV("pMin %d pMax %d", pMin, pMax);
        }
    }
    ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d",
          iMin, iMax, pMin, pMax);

    // vpx library takes same range for I/P picture type
    int32_t maxFrameQP = std::min({iMax, pMax});
    int32_t minFrameQP = std::max({iMin, pMin});
    if (minFrameQP > maxFrameQP) {
        minFrameQP = maxFrameQP;
    }
    // put them back into the structure
    for (size_t i = 0; i < me.v.flexCount(); ++i) {
        const C2PictureQuantizationStruct &layer = me.v.m.values[i];

        if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
            me.set().m.values[i].max = maxFrameQP;
            me.set().m.values[i].min = minFrameQP;
        }
        else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
            me.set().m.values[i].max = maxFrameQP;
            me.set().m.values[i].min = minFrameQP;
        }
    }
    ALOGV("PictureQuantizationSetter(exit): minFrameQP = %d maxFrameQP = %d",
          minFrameQP, maxFrameQP);
    return C2R::Ok();
}

C2R C2SoftVpxEnc::IntfImpl::ColorAspectsSetter(bool mayBlock,
                                               C2P<C2StreamColorAspectsInfo::input>& me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
        me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
        me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
        me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
        me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
}
C2R C2SoftVpxEnc::IntfImpl::CodedColorAspectsSetter(
        bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
        const C2P<C2StreamColorAspectsInfo::input>& coded) {
    (void)mayBlock;
    me.set().range = coded.v.range;
    me.set().primaries = coded.v.primaries;
    me.set().transfer = coded.v.transfer;
    me.set().matrix = coded.v.matrix;
    return C2R::Ok();
}

#if 0
static size_t getCpuCoreCount() {
    long cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
#else
    // _SC_NPROC_ONLN must be defined...
    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
    CHECK(cpuCoreCount >= 1);
    ALOGV("Number of CPU cores: %ld", cpuCoreCount);
    return (size_t)cpuCoreCount;
}
#endif

C2SoftVpxEnc::C2SoftVpxEnc(const char* name, c2_node_id_t id,
                           const std::shared_ptr<IntfImpl>& intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mCodecContext(nullptr),
      mCodecConfiguration(nullptr),
      mCodecInterface(nullptr),
      mStrideAlign(2),
      mColorFormat(VPX_IMG_FMT_I420),
      mBitrateControlMode(VPX_VBR),
      mErrorResilience(false),
      mMinQuantizer(0),
      mMaxQuantizer(0),
      mTemporalLayers(0),
      mTemporalPatternType(VPXTemporalLayerPatternNone),
      mTemporalPatternLength(0),
      mTemporalPatternIdx(0),
      mLastTimestamp(0x7FFFFFFFFFFFFFFFull),
      mSignalledOutputEos(false),
      mSignalledError(false) {
    for (int i = 0; i < MAXTEMPORALLAYERS; i++) {
        mTemporalLayerBitrateRatio[i] = 1.0f;
    }
}

C2SoftVpxEnc::~C2SoftVpxEnc() {
    onRelease();
}

c2_status_t C2SoftVpxEnc::onInit() {
    status_t err = initEncoder();
    return err == OK ? C2_OK : C2_CORRUPTED;
}

void C2SoftVpxEnc::onRelease() {
    if (mCodecContext) {
        vpx_codec_destroy(mCodecContext);
        delete mCodecContext;
        mCodecContext = nullptr;
    }

    if (mCodecConfiguration) {
        delete mCodecConfiguration;
        mCodecConfiguration = nullptr;
    }

    // this one is not allocated by us
    mCodecInterface = nullptr;
}

c2_status_t C2SoftVpxEnc::onStop() {
    onRelease();
    mLastTimestamp = 0x7FFFFFFFFFFFFFFFLL;
    mSignalledOutputEos = false;
    mSignalledError = false;
    return C2_OK;
}

void C2SoftVpxEnc::onReset() {
    (void)onStop();
}

c2_status_t C2SoftVpxEnc::onFlush_sm() {
    return onStop();
}

status_t C2SoftVpxEnc::initEncoder() {
    vpx_codec_err_t codec_return;
    status_t result = UNKNOWN_ERROR;
    {
        IntfImpl::Lock lock = mIntf->lock();
        mSize = mIntf->getSize_l();
        mBitrate = mIntf->getBitrate_l();
        mBitrateMode = mIntf->getBitrateMode_l();
        mFrameRate = mIntf->getFrameRate_l();
        mIntraRefresh = mIntf->getIntraRefresh_l();
        mRequestSync = mIntf->getRequestSync_l();
        mLayering = mIntf->getTemporalLayers_l();
        mTemporalLayers = mLayering->m.layerCount;
        mQpBounds = mIntf->getPictureQuantization_l();
    }

    switch (mBitrateMode->value) {
        case C2Config::BITRATE_CONST:
            mBitrateControlMode = VPX_CBR;
            break;
        case C2Config::BITRATE_VARIABLE:
        [[fallthrough]];
        default:
            mBitrateControlMode = VPX_VBR;
            break;
    }

    if (mQpBounds->flexCount() > 0) {
        // read min max qp for sequence
        for (size_t i = 0; i < mQpBounds->flexCount(); ++i) {
            const C2PictureQuantizationStruct &layer = mQpBounds->m.values[i];
            if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
                mMaxQuantizer = layer.max;
                mMinQuantizer = layer.min;
                break;
            }
        }
    }

    setCodecSpecificInterface();
    if (!mCodecInterface) goto CleanUp;

    ALOGD("VPx: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u",
          (uint32_t)mBitrateControlMode, mTemporalLayers, mIntf->getSyncFramePeriod(),
          mMinQuantizer, mMaxQuantizer);

    mCodecConfiguration = new vpx_codec_enc_cfg_t;
    if (!mCodecConfiguration) goto CleanUp;
    codec_return = vpx_codec_enc_config_default(mCodecInterface,
                                                mCodecConfiguration,
                                                0);
    if (codec_return != VPX_CODEC_OK) {
        ALOGE("Error populating default configuration for vpx encoder.");
        goto CleanUp;
    }

    mCodecConfiguration->g_w = mSize->width;
    mCodecConfiguration->g_h = mSize->height;
    //mCodecConfiguration->g_threads = getCpuCoreCount();
    mCodecConfiguration->g_threads = 0;
    mCodecConfiguration->g_error_resilient = mErrorResilience;

    // timebase unit is microsecond
    // g_timebase is in seconds (i.e. 1/1000000 seconds)
    mCodecConfiguration->g_timebase.num = 1;
    mCodecConfiguration->g_timebase.den = 1000000;
    // rc_target_bitrate is in kbps, mBitrate in bps
    mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000;
    mCodecConfiguration->rc_end_usage = mBitrateControlMode;
    // Disable frame drop - not allowed in MediaCodec now.
    mCodecConfiguration->rc_dropframe_thresh = 0;
    // Disable lagged encoding.
    mCodecConfiguration->g_lag_in_frames = 0;
    if (mBitrateControlMode == VPX_CBR) {
        // Disable spatial resizing.
        mCodecConfiguration->rc_resize_allowed = 0;
        // Single-pass mode.
        mCodecConfiguration->g_pass = VPX_RC_ONE_PASS;
        // Maximum amount of bits that can be subtracted from the target
        // bitrate - expressed as percentage of the target bitrate.
        mCodecConfiguration->rc_undershoot_pct = 100;
        // Maximum amount of bits that can be added to the target
        // bitrate - expressed as percentage of the target bitrate.
        mCodecConfiguration->rc_overshoot_pct = 15;
        // Initial value of the buffer level in ms.
        mCodecConfiguration->rc_buf_initial_sz = 500;
        // Amount of data that the encoder should try to maintain in ms.
        mCodecConfiguration->rc_buf_optimal_sz = 600;
        // The amount of data that may be buffered by the decoding
        // application in ms.
        mCodecConfiguration->rc_buf_sz = 1000;
        // Enable error resilience - needed for packet loss.
        mCodecConfiguration->g_error_resilient = 1;
        // Maximum key frame interval - for CBR boost to 3000
        mCodecConfiguration->kf_max_dist = 3000;
        // Encoder determines optimal key frame placement automatically.
        mCodecConfiguration->kf_mode = VPX_KF_AUTO;
    }

    // Frames temporal pattern - for now WebRTC like pattern is only supported.
    switch (mTemporalLayers) {
        case 0:
            mTemporalPatternLength = 0;
            break;
        case 1:
            mCodecConfiguration->ts_number_layers = 1;
            mCodecConfiguration->ts_rate_decimator[0] = 1;
            mCodecConfiguration->ts_periodicity = 1;
            mCodecConfiguration->ts_layer_id[0] = 0;
            mTemporalPattern[0] = kTemporalUpdateLastRefAll;
            mTemporalPatternLength = 1;
            break;
        case 2:
            mCodecConfiguration->ts_number_layers = 2;
            mCodecConfiguration->ts_rate_decimator[0] = 2;
            mCodecConfiguration->ts_rate_decimator[1] = 1;
            mCodecConfiguration->ts_periodicity = 2;
            mCodecConfiguration->ts_layer_id[0] = 0;
            mCodecConfiguration->ts_layer_id[1] = 1;
            mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
            mTemporalPattern[1] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
            mTemporalPattern[2] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[3] = kTemporalUpdateGoldenRefAltRef;
            mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[5] = kTemporalUpdateGoldenRefAltRef;
            mTemporalPattern[6] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[7] = kTemporalUpdateNone;
            mTemporalLayerBitrateRatio[0] = mLayering->m.bitrateRatios[0];
            mTemporalPatternLength = 8;
            break;
        case 3:
            mCodecConfiguration->ts_number_layers = 3;
            mCodecConfiguration->ts_rate_decimator[0] = 4;
            mCodecConfiguration->ts_rate_decimator[1] = 2;
            mCodecConfiguration->ts_rate_decimator[2] = 1;
            mCodecConfiguration->ts_periodicity = 4;
            mCodecConfiguration->ts_layer_id[0] = 0;
            mCodecConfiguration->ts_layer_id[1] = 2;
            mCodecConfiguration->ts_layer_id[2] = 1;
            mCodecConfiguration->ts_layer_id[3] = 2;
            mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
            mTemporalPattern[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef;
            mTemporalPattern[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
            mTemporalPattern[3] = kTemporalUpdateNone;
            mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
            mTemporalPattern[5] = kTemporalUpdateNone;
            mTemporalPattern[6] = kTemporalUpdateGoldenRefAltRef;
            mTemporalPattern[7] = kTemporalUpdateNone;
            mTemporalLayerBitrateRatio[0] = mLayering->m.bitrateRatios[0];
            mTemporalLayerBitrateRatio[1] = mLayering->m.bitrateRatios[1];
            mTemporalPatternLength = 8;
            break;
        default:
            ALOGE("Wrong number of temporal layers %zu", mTemporalLayers);
            goto CleanUp;
    }
    // Set bitrate values for each layer
    for (size_t i = 0; i < mCodecConfiguration->ts_number_layers; i++) {
        mCodecConfiguration->ts_target_bitrate[i] =
            mCodecConfiguration->rc_target_bitrate *
            mTemporalLayerBitrateRatio[i];
    }
    if (mIntf->getSyncFramePeriod() >= 0) {
        mCodecConfiguration->kf_max_dist = mIntf->getSyncFramePeriod();
        mCodecConfiguration->kf_min_dist = mIntf->getSyncFramePeriod();
        mCodecConfiguration->kf_mode = VPX_KF_AUTO;
    }
    if (mMinQuantizer > 0) {
        mCodecConfiguration->rc_min_quantizer = mMinQuantizer;
    }
    if (mMaxQuantizer > 0) {
        mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
    }
    setCodecSpecificConfiguration();
    mCodecContext = new vpx_codec_ctx_t;
    if (!mCodecContext) goto CleanUp;
    codec_return = vpx_codec_enc_init(mCodecContext,
                                      mCodecInterface,
                                      mCodecConfiguration,
                                      0); // flags
    if (codec_return != VPX_CODEC_OK) {
        ALOGE("Error initializing vpx encoder");
        goto CleanUp;
    }

    // Extra CBR settings
    if (mBitrateControlMode == VPX_CBR) {
        codec_return = vpx_codec_control(mCodecContext,
                                         VP8E_SET_STATIC_THRESHOLD,
                                         1);
        if (codec_return == VPX_CODEC_OK) {
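            // Cap key frame size via VP8E_SET_MAX_INTRA_BITRATE_PCT, which is
            // expressed as a percentage of the per-frame average bitrate.
            // rc_buf_optimal_sz (ms) * fps / 10 is the optimal buffer level in
            // per-frame percent; dividing by 20 targets roughly half of that.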
            uint32_t rc_max_intra_target =
                (uint32_t)(mCodecConfiguration->rc_buf_optimal_sz * mFrameRate->value / 20 + 0.5);
            // Don't go below 3 times per frame bandwidth.
            if (rc_max_intra_target < 300) {
                rc_max_intra_target = 300;
            }
            codec_return = vpx_codec_control(mCodecContext,
                                             VP8E_SET_MAX_INTRA_BITRATE_PCT,
                                             rc_max_intra_target);
        }
        if (codec_return == VPX_CODEC_OK) {
            codec_return = vpx_codec_control(mCodecContext,
                                             VP8E_SET_CPUUSED,
                                             -8);
        }
        if (codec_return != VPX_CODEC_OK) {
            ALOGE("Error setting cbr parameters for vpx encoder.");
            goto CleanUp;
        }
    }

    codec_return = setCodecSpecificControls();
    if (codec_return != VPX_CODEC_OK) goto CleanUp;

    {
        uint32_t width = mSize->width;
        uint32_t height = mSize->height;
        if (((uint64_t)width * height) >
            ((uint64_t)INT32_MAX / 3)) {
            ALOGE("b/25812794, Buffer size is too big, width=%u, height=%u.", width, height);
        } else {
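            // Round width/height up to the stride alignment and allocate an
            // I420 scratch buffer: a full-size Y plane plus quarter-size U and
            // V planes, i.e. stride * vstride * 3 / 2 bytes.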
            uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
            uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
            mConversionBuffer = MemoryBlock::Allocate(stride * vstride * 3 / 2);
            if (!mConversionBuffer.size()) {
                ALOGE("Allocating conversion buffer failed.");
            } else {
                mNumInputFrames = -1;
                return OK;
            }
        }
    }

CleanUp:
    onRelease();
    return result;
}

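// Translate the current position in the WebRTC-style temporal layering pattern
// into per-frame libvpx encode flags that control which reference buffers
// (last / golden / altref) the frame may reference and which it updates.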
vpx_enc_frame_flags_t C2SoftVpxEnc::getEncodeFlags() {
    vpx_enc_frame_flags_t flags = 0;
    if (mTemporalPatternLength > 0) {
        int patternIdx = mTemporalPatternIdx % mTemporalPatternLength;
        mTemporalPatternIdx++;
        switch (mTemporalPattern[patternIdx]) {
            case kTemporalUpdateLast:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                flags |= VP8_EFLAG_NO_REF_ARF;
                break;
            case kTemporalUpdateGoldenWithoutDependency:
                flags |= VP8_EFLAG_NO_REF_GF;
                [[fallthrough]];
            case kTemporalUpdateGolden:
                flags |= VP8_EFLAG_NO_REF_ARF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateAltrefWithoutDependency:
                flags |= VP8_EFLAG_NO_REF_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                [[fallthrough]];
            case kTemporalUpdateAltref:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateNoneNoRefAltref:
                flags |= VP8_EFLAG_NO_REF_ARF;
                [[fallthrough]];
            case kTemporalUpdateNone:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                flags |= VP8_EFLAG_NO_UPD_ENTROPY;
                break;
            case kTemporalUpdateNoneNoRefGoldenRefAltRef:
                flags |= VP8_EFLAG_NO_REF_GF;
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                flags |= VP8_EFLAG_NO_UPD_ENTROPY;
                break;
            case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
                flags |= VP8_EFLAG_NO_REF_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateLastRefAltRef:
                flags |= VP8_EFLAG_NO_UPD_GF;
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                break;
            case kTemporalUpdateGoldenRefAltRef:
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_LAST;
                break;
            case kTemporalUpdateLastAndGoldenRefAltRef:
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_REF_GF;
                break;
            case kTemporalUpdateLastRefAll:
                flags |= VP8_EFLAG_NO_UPD_ARF;
                flags |= VP8_EFLAG_NO_UPD_GF;
                break;
        }
    }
    return flags;
}

// TODO: add support for YUV input color formats
// TODO: add support for SVC, ARF. SVC and ARF returns multiple frames
// (hierarchical / noshow) in one call. These frames should be combined in to
// a single buffer and sent back to the client
void C2SoftVpxEnc::process(
        const std::unique_ptr<C2Work> &work,
        const std::shared_ptr<C2BlockPool> &pool) {
    // Initialize output work
    work->result = C2_OK;
    work->workletsProcessed = 1u;
    work->worklets.front()->output.flags = work->input.flags;

    if (mSignalledError || mSignalledOutputEos) {
        work->result = C2_BAD_VALUE;
        return;
    }
    // Initialize encoder if not already
    if (!mCodecContext && OK != initEncoder()) {
        ALOGE("Failed to initialize encoder");
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        return;
    }

    std::shared_ptr<C2GraphicView> rView;
    std::shared_ptr<C2Buffer> inputBuffer;
    if (!work->input.buffers.empty()) {
        inputBuffer = work->input.buffers[0];
        rView = std::make_shared<C2GraphicView>(
                inputBuffer->data().graphicBlocks().front().map().get());
        if (rView->error() != C2_OK) {
            ALOGE("graphic view map err = %d", rView->error());
            work->result = C2_CORRUPTED;
            return;
        }
        //(b/232396154)
        //workaround for incorrect crop size in view when using surface mode
        rView->setCrop_be(C2Rect(mSize->width, mSize->height));
    } else {
        ALOGV("Empty input Buffer");
        uint32_t flags = 0;
        if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
            flags |= C2FrameData::FLAG_END_OF_STREAM;
        }
        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
        work->worklets.front()->output.buffers.clear();
        work->worklets.front()->output.ordinal = work->input.ordinal;
        work->workletsProcessed = 1u;
        return;
    }

    const C2ConstGraphicBlock inBuffer =
        inputBuffer->data().graphicBlocks().front();
    if (inBuffer.width() < mSize->width ||
        inBuffer.height() < mSize->height) {
        ALOGE("unexpected Input buffer attributes %d(%d) x %d(%d)",
              inBuffer.width(), mSize->width, inBuffer.height(),
              mSize->height);
        mSignalledError = true;
        work->result = C2_BAD_VALUE;
        return;
    }
    bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
    vpx_image_t raw_frame;
    const C2PlanarLayout &layout = rView->layout();
    uint32_t width = mSize->width;
    uint32_t height = mSize->height;
    if (width > 0x8000 || height > 0x8000) {
        ALOGE("Image too big: %u x %u", width, height);
        work->result = C2_BAD_VALUE;
        return;
    }
    uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1);
    uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1);
    switch (layout.type) {
        case C2PlanarLayout::TYPE_RGB:
        case C2PlanarLayout::TYPE_RGBA: {
            std::shared_ptr<C2StreamColorAspectsInfo::output> colorAspects;
            {
                IntfImpl::Lock lock = mIntf->lock();
                colorAspects = mIntf->getCodedColorAspects_l();
            }
            ConvertRGBToPlanarYUV(mConversionBuffer.data(), stride, vstride,
                                  mConversionBuffer.size(), *rView.get(),
                                  colorAspects->matrix, colorAspects->range);
            vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
                         mStrideAlign, mConversionBuffer.data());
            break;
        }
        case C2PlanarLayout::TYPE_YUV: {
            if (!IsYUV420(*rView)) {
                ALOGE("input is not YUV420");
                work->result = C2_BAD_VALUE;
                return;
            }

            if (layout.planes[layout.PLANE_Y].colInc == 1
                    && layout.planes[layout.PLANE_U].colInc == 1
                    && layout.planes[layout.PLANE_V].colInc == 1) {
                // I420 compatible - though with custom offset and stride
                vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height,
                             mStrideAlign, (uint8_t*)rView->data()[0]);
                raw_frame.planes[1] = (uint8_t*)rView->data()[1];
                raw_frame.planes[2] = (uint8_t*)rView->data()[2];
                raw_frame.stride[0] = layout.planes[layout.PLANE_Y].rowInc;
                raw_frame.stride[1] = layout.planes[layout.PLANE_U].rowInc;
                raw_frame.stride[2] = layout.planes[layout.PLANE_V].rowInc;
            } else {
                // copy to I420
                MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, stride, vstride);
                if (mConversionBuffer.size() >= stride * vstride * 3 / 2) {
                    status_t err = ImageCopy(mConversionBuffer.data(), &img, *rView);
                    if (err != OK) {
                        ALOGE("Buffer conversion failed: %d", err);
                        work->result = C2_BAD_VALUE;
                        return;
                    }
                    vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, stride, vstride,
                                 mStrideAlign, mConversionBuffer.data());
                    vpx_img_set_rect(&raw_frame, 0, 0, width, height);
                } else {
                    ALOGE("Conversion buffer is too small: %u x %u for %zu",
                          stride, vstride, mConversionBuffer.size());
                    work->result = C2_BAD_VALUE;
                    return;
                }
            }
            break;
        }
        default:
            ALOGE("Unrecognized plane type: %d", layout.type);
            work->result = C2_BAD_VALUE;
            return;
    }

    vpx_enc_frame_flags_t flags = getEncodeFlags();
    // handle dynamic config parameters
    {
        IntfImpl::Lock lock = mIntf->lock();
        std::shared_ptr<C2StreamIntraRefreshTuning::output> intraRefresh = mIntf->getIntraRefresh_l();
        std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
        std::shared_ptr<C2StreamRequestSyncFrameTuning::output> requestSync = mIntf->getRequestSync_l();
        lock.unlock();

        if (intraRefresh != mIntraRefresh) {
            mIntraRefresh = intraRefresh;
            ALOGV("Got mIntraRefresh request");
        }

        if (requestSync != mRequestSync) {
            // we can handle IDR immediately
            if (requestSync->value) {
                // unset request
                C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE);
                std::vector<std::unique_ptr<C2SettingResult>> failures;
                mIntf->config({ &clearSync }, C2_MAY_BLOCK, &failures);
                ALOGV("Got sync request");
                flags |= VPX_EFLAG_FORCE_KF;
            }
            mRequestSync = requestSync;
        }

        if (bitrate != mBitrate) {
            mBitrate = bitrate;
            mCodecConfiguration->rc_target_bitrate =
                (mBitrate->value + 500) / 1000;
            vpx_codec_err_t res = vpx_codec_enc_config_set(mCodecContext,
                                                           mCodecConfiguration);
            if (res != VPX_CODEC_OK) {
                ALOGE("vpx encoder failed to update bitrate: %s",
                      vpx_codec_err_to_string(res));
                mSignalledError = true;
                work->result = C2_CORRUPTED;
                return;
            }
        }
    }

    uint64_t inputTimeStamp = work->input.ordinal.timestamp.peekull();
    uint32_t frameDuration;
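    // Derive the frame duration from the gap between consecutive input
    // timestamps; if the timestamp did not advance, fall back to the
    // configured frame rate (or 30 fps when it is unset).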
    if (inputTimeStamp > mLastTimestamp) {
        frameDuration = (uint32_t)(inputTimeStamp - mLastTimestamp);
    } else {
        // Use default of 30 fps in case of 0 frame rate.
        float frameRate = mFrameRate->value;
        if (frameRate < 0.001) {
            frameRate = 30;
        }
        frameDuration = (uint32_t)(1000000 / frameRate + 0.5);
    }
    mLastTimestamp = inputTimeStamp;

    vpx_codec_err_t codec_return = vpx_codec_encode(mCodecContext, &raw_frame,
                                                    inputTimeStamp,
                                                    frameDuration, flags,
                                                    VPX_DL_REALTIME);
    if (codec_return != VPX_CODEC_OK) {
        ALOGE("vpx encoder failed to encode frame");
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        return;
    }

    bool populated = false;
    vpx_codec_iter_t encoded_packet_iterator = nullptr;
    const vpx_codec_cx_pkt_t* encoded_packet;
    while ((encoded_packet = vpx_codec_get_cx_data(
                    mCodecContext, &encoded_packet_iterator))) {
        if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) {
            std::shared_ptr<C2LinearBlock> block;
            C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
            c2_status_t err = pool->fetchLinearBlock(encoded_packet->data.frame.sz, usage, &block);
            if (err != C2_OK) {
                ALOGE("fetchLinearBlock for Output failed with status %d", err);
                work->result = C2_NO_MEMORY;
                return;
            }
            C2WriteView wView = block->map().get();
            if (wView.error()) {
                ALOGE("write view map failed %d", wView.error());
                work->result = C2_CORRUPTED;
                return;
            }

            memcpy(wView.data(), encoded_packet->data.frame.buf, encoded_packet->data.frame.sz);
            ++mNumInputFrames;

            ALOGD("bytes generated %zu", encoded_packet->data.frame.sz);
            uint32_t flags = 0;
            if (eos) {
                flags |= C2FrameData::FLAG_END_OF_STREAM;
            }
            work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
            work->worklets.front()->output.buffers.clear();
            std::shared_ptr<C2Buffer> buffer =
                createLinearBuffer(block, 0, encoded_packet->data.frame.sz);
            if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY) {
                buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
                        0u /* stream id */, C2Config::SYNC_FRAME));
            }
            work->worklets.front()->output.buffers.push_back(buffer);
            work->worklets.front()->output.ordinal = work->input.ordinal;
            work->worklets.front()->output.ordinal.timestamp = encoded_packet->data.frame.pts;
            work->workletsProcessed = 1u;
            populated = true;
            if (eos) {
                mSignalledOutputEos = true;
                ALOGV("signalled EOS");
            }
        }
    }
    if (!populated) {
        work->workletsProcessed = 0u;
    }
}

c2_status_t C2SoftVpxEnc::drain(
        uint32_t drainMode,
        const std::shared_ptr<C2BlockPool> &pool) {
    (void)pool;
    if (drainMode == NO_DRAIN) {
        ALOGW("drain with NO_DRAIN: no-op");
        return C2_OK;
    }
    if (drainMode == DRAIN_CHAIN) {
        ALOGW("DRAIN_CHAIN not supported");
        return C2_OMITTED;
    }

    return C2_OK;
}

}  // namespace android