Merge "JPEG/R refactor: rename jpegrecoverymap library to ultrahdr" into udc-dev
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
index 95610fa..703033b 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp
@@ -265,14 +265,16 @@
                 needsUpdate = true;
             }
         }
-        // If not found, set to the highest supported level.
-        if (!found) {
+        // If not found or exceeds max level, set to the highest supported level.
 #ifdef MPEG4
+        if (!found || me.v.level > LEVEL_MP4V_2) {
             me.set().level = LEVEL_MP4V_2;
-#else
-            me.set().level = LEVEL_H263_40;
-#endif
         }
+#else
+        if (!found || (me.v.level != LEVEL_H263_45 && me.v.level > LEVEL_H263_40)) {
+            me.set().level = LEVEL_H263_40;
+        }
+#endif
         return C2R::Ok();
     }
 
@@ -288,18 +290,6 @@
         return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
     }
 
-    ProfileLevelType getProfileLevel_l() const {
-#ifdef MPEG4
-        if (mProfileLevel->level == LEVEL_MP4V_0) return SIMPLE_PROFILE_LEVEL0;
-        else if (mProfileLevel->level == LEVEL_MP4V_1) return SIMPLE_PROFILE_LEVEL1;
-        return SIMPLE_PROFILE_LEVEL2;  // level == LEVEL_MP4V_2
-#else
-        // library does not export h263 specific levels. No way to map C2 enums to
-        // library specific constants. Return max supported level.
-        return CORE_PROFILE_LEVEL2;
-#endif
-    }
-
    private:
     std::shared_ptr<C2StreamUsageTuning::input> mUsage;
     std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
@@ -416,7 +406,7 @@
     mEncParams->encFrameRate[0] = mFrameRate->value + 0.5;
     mEncParams->rcType = VBR_1;
     mEncParams->vbvDelay = VBV_DELAY;
-    mEncParams->profile_level = mProfileLevel;
+    mEncParams->profile_level = CORE_PROFILE_LEVEL2;
     mEncParams->packetSize = 32;
     mEncParams->rvlcEnable = PV_OFF;
     mEncParams->numLayers = 1;
@@ -457,7 +447,6 @@
         mSize = mIntf->getSize_l();
         mBitrate = mIntf->getBitrate_l();
         mFrameRate = mIntf->getFrameRate_l();
-        mProfileLevel = mIntf->getProfileLevel_l();
     }
     c2_status_t err = initEncParams();
     if (C2_OK != err) {
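
For reference, the clamping rule that the validation hunk above introduces can be read as a small standalone check. This is a minimal sketch under assumed names: clampH263Level() and the bare enum below are illustrative stand-ins, not the Codec2 parameter types the component actually uses. The special case reflects that LEVEL_H263_45 sits numerically above LEVEL_H263_40 in the level enum yet is still accepted, so a plain greater-than test alone would wrongly reject it.

// Hedged sketch of the H.263 branch of the new level validation (stand-in names).
enum H263Level { H263_LEVEL_10 = 10, H263_LEVEL_40 = 40, H263_LEVEL_45 = 45 };

H263Level clampH263Level(bool found, H263Level requested) {
    // Unknown levels, or levels above 40 other than the special level 45,
    // fall back to the highest generally supported level.
    if (!found || (requested != H263_LEVEL_45 && requested > H263_LEVEL_40)) {
        return H263_LEVEL_40;
    }
    return requested;
}

The MPEG4 branch has the same shape with a plain comparison against LEVEL_MP4V_2. In the same change, initEncParams() now pins mEncParams->profile_level to CORE_PROFILE_LEVEL2 instead of mapping levels through the removed getProfileLevel_l().
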
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
index e5c8ea6..43461fc 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h
@@ -65,7 +65,6 @@
     std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
     std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
     std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
-    ProfileLevelType mProfileLevel;
 
     int64_t  mNumInputFrames;
     MP4EncodingMode mEncodeMode;
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index d2df4f3..3b29c57 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -147,6 +147,8 @@
       mCCodecCallback(callback),
       mFrameIndex(0u),
       mFirstValidFrameIndex(0u),
+      mIsSurfaceToDisplay(false),
+      mHasPresentFenceTimes(false),
       mMetaMode(MODE_NONE),
       mInputMetEos(false),
       mSendEncryptedInfoBuffer(false) {
@@ -988,20 +990,36 @@
 
     int64_t mediaTimeUs = 0;
     (void)buffer->meta()->findInt64("timeUs", &mediaTimeUs);
-    trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
-    processRenderedFrames(qbo.frameTimestamps);
+    if (mIsSurfaceToDisplay) {
+        trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
+        processRenderedFrames(qbo.frameTimestamps);
+    } else {
+        // When the surface is an intermediate surface, onFrameRendered is triggered immediately
+        // when the frame is queued to the non-display surface
+        mCCodecCallback->onOutputFramesRendered(mediaTimeUs, timestampNs);
+    }
 
     return OK;
 }
 
 void CCodecBufferChannel::initializeFrameTrackingFor(ANativeWindow * window) {
+    mTrackedFrames.clear();
+
+    int isSurfaceToDisplay = 0;
+    window->query(window, NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &isSurfaceToDisplay);
+    mIsSurfaceToDisplay = isSurfaceToDisplay == 1;
+    // No frame tracking is needed if we're not sending frames to the display
+    if (!mIsSurfaceToDisplay) {
+        // Return early so we don't call into SurfaceFlinger (requiring permissions)
+        return;
+    }
+
     int hasPresentFenceTimes = 0;
     window->query(window, NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT, &hasPresentFenceTimes);
     mHasPresentFenceTimes = hasPresentFenceTimes == 1;
     if (!mHasPresentFenceTimes) {
         ALOGI("Using latch times for frame rendered signals - present fences not supported");
     }
-    mTrackedFrames.clear();
 }
 
 void CCodecBufferChannel::trackReleasedFrame(const IGraphicBufferProducer::QueueBufferOutput& qbo,
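
The two window queries added to initializeFrameTrackingFor() above can also be written as small helpers around the public ANativeWindow query() hook. A minimal sketch using the real NATIVE_WINDOW_* query codes; the helper names are assumptions, not part of CCodecBufferChannel:

#include <system/window.h>  // ANativeWindow, NATIVE_WINDOW_* query codes

// True when buffers queued to this window reach the window composer
// (SurfaceFlinger), i.e. when latch/present feedback will ever arrive.
static bool queuesToComposer(ANativeWindow *window) {
    int value = 0;
    // query() returns 0 on success; treat a failed query as "not a display surface".
    if (window->query(window, NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &value) != 0) {
        return false;
    }
    return value == 1;
}

// True when the window reports present-fence times; otherwise the channel
// falls back to latch times for its onFrameRendered signals.
static bool supportsPresentFenceTimes(ANativeWindow *window) {
    int value = 0;
    if (window->query(window, NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT, &value) != 0) {
        return false;
    }
    return value == 1;
}
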
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 20dca2b..0d25d6d 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -331,6 +331,7 @@
     sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
 
     std::deque<TrackedFrame> mTrackedFrames;
+    bool mIsSurfaceToDisplay;
     bool mHasPresentFenceTimes;
 
     struct OutputSurface {
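
Taken together, the new mIsSurfaceToDisplay flag selects one of two rendered-notification paths after a buffer is queued: track the frame and wait for display feedback, or report it rendered immediately. A hedged sketch of that decision with stand-in names (FrameNotifier and its methods are hypothetical, not CCodecBufferChannel members):

#include <cstdint>
#include <cstdio>

struct FrameNotifier {
    bool isSurfaceToDisplay = false;   // mirrors mIsSurfaceToDisplay (false by default)

    void onBufferQueued(int64_t mediaTimeUs, int64_t queueTimeNs) {
        if (isSurfaceToDisplay) {
            // Display path: defer the "rendered" signal until SurfaceFlinger
            // provides latch/present feedback for the tracked frame.
            trackForDisplayFeedback(mediaTimeUs, queueTimeNs);
        } else {
            // Intermediate surface: nothing is ever presented, so report the
            // frame as rendered as soon as it is queued.
            reportRenderedNow(mediaTimeUs, queueTimeNs);
        }
    }

    void trackForDisplayFeedback(int64_t mediaTimeUs, int64_t queueTimeNs) {
        std::printf("tracking %lld (queued at %lld ns)\n",
                    (long long)mediaTimeUs, (long long)queueTimeNs);
    }

    void reportRenderedNow(int64_t mediaTimeUs, int64_t renderTimeNs) {
        std::printf("rendered %lld at %lld ns\n",
                    (long long)mediaTimeUs, (long long)renderTimeNs);
    }
};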