Camera: Attach Jpeg/R composite stream metrics

Add Jpeg/R composite logic to generate stream
statistics and attach the corresponding data as
part of the camera device stats.
Clean up the unused jpeg orientation code. The
Jpeg/R orientation in this scenario is handled by
the Exif data, which is generated from the same
capture result and already takes the jpeg
orientation into account.

Bug: 262265296
Test: atest -c -d
cts/tests/camera/src/android/hardware/camera2/cts/StillCaptureTest.java#testJpegRCapture
--iterations=10
Additionally, check that the data collected via
"./out/host/linux-x86/bin/statsd_testdrive 227"
while CTS is running includes the Jpeg/R stream
metrics.

Change-Id: Icc67bbfa57ed3751757e64901122aff6150ee0d0
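
For reference, the sketch below is a simplified, self-contained
illustration of the bookkeeping this patch introduces: record a
request timestamp per frame number, derive a capture latency when the
composited output is queued, and fold the latencies into a histogram
that can later be reported as per-stream statistics. The class name
CompositeStreamStatsSketch and all of its members are illustrative
only and do not match the real SessionStatsBuilder or
CameraStreamStats interfaces.

// Simplified sketch of the per-stream latency bookkeeping.  All names
// below are hypothetical; the actual change uses SessionStatsBuilder,
// mRequestTimeMap, and CameraStreamStats.
#include <algorithm>
#include <chrono>
#include <cstdint>
#include <iostream>
#include <unordered_map>
#include <vector>

class CompositeStreamStatsSketch {
public:
    // A request targeting the composite stream was submitted
    // (analogous to the mRequestTimeMap insertion in onFrameAvailable).
    void onRequestSubmitted(int64_t frameNumber) {
        mRequestTimeNs[frameNumber] = nowNs();
    }

    // The composited buffer for that frame was queued to the consumer
    // (analogous to the incCounter(..., captureLatency) call).
    void onOutputQueued(int64_t frameNumber) {
        auto it = mRequestTimeNs.find(frameNumber);
        if (it == mRequestTimeNs.end()) return;
        int64_t latencyMs = (nowNs() - it->second) / 1000000;
        mRequestTimeNs.erase(it);
        if (mFirstRequestLatencyMs < 0) mFirstRequestLatencyMs = latencyMs;
        mCompletedFrames++;
        // Bucket the latency into a fixed-width histogram.
        size_t bin = std::min<size_t>(latencyMs / kBinWidthMs, kBins - 1);
        mLatencyHistogram[bin]++;
    }

    // The frame failed and no output will be produced
    // (analogous to the "dropped" counter updates).
    void onFrameDropped(int64_t frameNumber) {
        mRequestTimeNs.erase(frameNumber);
        mDroppedFrames++;
    }

    // Print a snapshot of the collected statistics.
    void dumpStats() const {
        std::cout << "completed=" << mCompletedFrames
                  << " dropped=" << mDroppedFrames
                  << " firstLatencyMs=" << mFirstRequestLatencyMs << "\n";
        for (size_t i = 0; i < kBins; i++) {
            std::cout << "  <" << (i + 1) * kBinWidthMs << "ms: "
                      << mLatencyHistogram[i] << "\n";
        }
    }

private:
    static int64_t nowNs() {
        using namespace std::chrono;
        return duration_cast<nanoseconds>(
                steady_clock::now().time_since_epoch()).count();
    }

    static constexpr size_t kBins = 10;
    static constexpr int64_t kBinWidthMs = 50;

    std::unordered_map<int64_t, int64_t> mRequestTimeNs;
    std::vector<int64_t> mLatencyHistogram = std::vector<int64_t>(kBins, 0);
    int64_t mFirstRequestLatencyMs = -1;
    int64_t mCompletedFrames = 0;
    int64_t mDroppedFrames = 0;
};

In the patch itself the equivalent snapshot is taken in
JpegRCompositeStream::getStreamStats() and appended to the per-device
stream statistics in CameraDeviceClient::notifyIdle().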
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 18b28b8..6c9cef8 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -2026,8 +2026,20 @@
     if (remoteCb != 0) {
         remoteCb->onDeviceIdle();
     }
+
+    std::vector<hardware::CameraStreamStats> fullStreamStats = streamStats;
+    {
+        Mutex::Autolock l(mCompositeLock);
+        for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+            hardware::CameraStreamStats compositeStats;
+            mCompositeStreamMap.valueAt(i)->getStreamStats(&compositeStats);
+            if (compositeStats.mWidth > 0) {
+                fullStreamStats.push_back(compositeStats);
+            }
+        }
+    }
     Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
-            streamStats, mUserTag, mVideoStabilizationMode);
+            fullStreamStats, mUserTag, mVideoStabilizationMode);
 }
 
 void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 503cf23..4ed1c28 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -87,6 +87,7 @@
         mCaptureResults.clear();
         mFrameNumberMap.clear();
         mErrorFrameNumbers.clear();
+        mRequestTimeMap.clear();
     }
 
     return deleteInternalStreams();
@@ -97,6 +98,8 @@
     Mutex::Autolock l(mMutex);
     if (!mErrorState && (streamId == getStreamId())) {
         mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
+        auto ts = systemTime();
+        mRequestTimeMap.emplace(frameNumber, ts);
     }
 }
 
@@ -111,6 +114,11 @@
 void CompositeStream::eraseResult(int64_t frameNumber) {
     Mutex::Autolock l(mMutex);
 
+    auto requestTimeIt = mRequestTimeMap.find(frameNumber);
+    if (requestTimeIt != mRequestTimeMap.end()) {
+        mRequestTimeMap.erase(requestTimeIt);
+    }
+
     auto it = mPendingCaptureResults.find(frameNumber);
     if (it == mPendingCaptureResults.end()) {
         return;
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index c27faba..a551d11 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -83,6 +83,9 @@
     // Notify when shutter notify is triggered
     virtual void onShutter(const CaptureResultExtras& /*resultExtras*/, nsecs_t /*timestamp*/) {}
 
+    // Get composite stream stats
+    virtual void getStreamStats(hardware::CameraStreamStats* streamStats /*out*/) = 0;
+
     void onResultAvailable(const CaptureResult& result);
     bool onError(int32_t errorCode, const CaptureResultExtras& resultExtras);
 
@@ -140,6 +143,9 @@
     // Keeps a set buffer/result frame numbers for any errors detected during processing.
     std::set<int64_t> mErrorFrameNumbers;
 
+    // Frame number to request time map
+    std::unordered_map<int64_t, nsecs_t> mRequestTimeMap;
+
 };
 
 }; //namespace camera3
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index de0ed67..fbe99dd 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -69,6 +69,9 @@
     static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
             const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
 
+    // Get composite stream stats
+    void getStreamStats(hardware::CameraStreamStats*) override {};
+
 protected:
 
     bool threadLoop() override;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 3132183..602a247 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -75,6 +75,9 @@
     static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
             const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
 
+    // Get composite stream stats
+    void getStreamStats(hardware::CameraStreamStats*) override {};
+
     static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
             bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName = nullptr);
     static bool isInMemoryTempFileSupported();
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index 71f52db..5794747 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -52,6 +52,8 @@
         mP010BufferAcquired(false),
         mBlobBufferAcquired(false),
         mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
+        mOutputStreamUseCase(0),
+        mFirstRequestLatency(-1),
         mProducerListener(new ProducerListener()),
         mMaxJpegBufferSize(-1),
         mUHRMaxJpegBufferSize(-1),
@@ -152,15 +154,23 @@
         // Negative timestamp indicates that something went wrong during the capture result
         // collection process.
         if (it->first >= 0) {
-            mPendingInputFrames[it->first].frameNumber = std::get<0>(it->second);
+            auto frameNumber = std::get<0>(it->second);
+            mPendingInputFrames[it->first].frameNumber = frameNumber;
             mPendingInputFrames[it->first].result = std::get<1>(it->second);
+            mSessionStatsBuilder.incResultCounter(false /*dropped*/);
         }
         mCaptureResults.erase(it);
     }
 
     while (!mFrameNumberMap.empty()) {
         auto it = mFrameNumberMap.begin();
-        mPendingInputFrames[it->second].frameNumber = it->first;
+        auto frameNumber = it->first;
+        mPendingInputFrames[it->second].frameNumber = frameNumber;
+        auto requestTimeIt = mRequestTimeMap.find(frameNumber);
+        if (requestTimeIt != mRequestTimeMap.end()) {
+            mPendingInputFrames[it->second].requestTimeNs = requestTimeIt->second;
+            mRequestTimeMap.erase(requestTimeIt);
+        }
         mFrameNumberMap.erase(it);
     }
 
@@ -176,6 +186,8 @@
         }
 
         if (frameFound) {
+            mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/,
+                    0 /*captureLatencyMs*/);
             it = mErrorFrameNumbers.erase(it);
         } else {
             ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
@@ -193,6 +205,7 @@
     bool newInputAvailable = false;
     for (const auto& it : mPendingInputFrames) {
         if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) &&
+                (it.second.requestTimeNs != -1) &&
                 ((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) &&
                 (it.first < *currentTs)) {
             *currentTs = it.first;
@@ -247,12 +260,6 @@
         jpegQuality = entry.data.u8[0];
     }
 
-    uint8_t jpegOrientation = 0;
-    entry = inputFrame.result.find(ANDROID_JPEG_ORIENTATION);
-    if (entry.count > 0) {
-        jpegOrientation = entry.data.i32[0];
-    }
-
     if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), maxJpegRBufferSize, 1))
             != OK) {
         ALOGE("%s: Unable to configure stream buffer dimensions"
@@ -384,6 +391,14 @@
         .blobSizeBytes = static_cast<int32_t>(actualJpegRSize)
     };
     memcpy(header, &blobHeader, sizeof(CameraBlob));
+
+    if (inputFrame.requestTimeNs != -1) {
+        auto captureLatency = ns2ms(systemTime() - inputFrame.requestTimeNs);
+        mSessionStatsBuilder.incCounter(mP010StreamId, false /*dropped*/, captureLatency);
+        if (mFirstRequestLatency == -1) {
+            mFirstRequestLatency = captureLatency;
+        }
+    }
     outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
 
     return res;
@@ -410,6 +425,7 @@
         //TODO: Figure out correct requestId
         notifyError(inputFrame->frameNumber, -1 /*requestId*/);
         inputFrame->errorNotified = true;
+        mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/, 0 /*captureLatencyMs*/);
     }
 }
 
@@ -614,6 +630,7 @@
     }
 
     mOutputColorSpace = colorSpace;
+    mOutputStreamUseCase = streamUseCase;
     mBlobWidth = width;
     mBlobHeight = height;
 
@@ -668,6 +685,8 @@
         return res;
     }
 
+    mSessionStatsBuilder.addStream(mP010StreamId);
+
     run("JpegRCompositeStreamProc");
 
     return NO_ERROR;
@@ -772,6 +791,7 @@
     // characteristics data. The actual result data can be used for the jpeg quality but
     // in case it is absent we can default to maximum.
     eraseResult(resultExtras.frameNumber);
+    mSessionStatsBuilder.incResultCounter(true /*dropped*/);
 }
 
 bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
@@ -826,5 +846,31 @@
     return NO_ERROR;
 }
 
+void JpegRCompositeStream::getStreamStats(hardware::CameraStreamStats* streamStats) {
+    if ((streamStats == nullptr) || (mFirstRequestLatency == -1)) {
+        return;
+    }
+
+    bool deviceError;
+    std::map<int, StreamStats> stats;
+    mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount,
+            &deviceError, &stats);
+    if (stats.find(mP010StreamId) != stats.end()) {
+        streamStats->mWidth = mBlobWidth;
+        streamStats->mHeight = mBlobHeight;
+        streamStats->mFormat = HAL_PIXEL_FORMAT_BLOB;
+        streamStats->mDataSpace = static_cast<int>(kJpegRDataSpace);
+        streamStats->mDynamicRangeProfile = mP010DynamicRange;
+        streamStats->mColorSpace = mOutputColorSpace;
+        streamStats->mStreamUseCase = mOutputStreamUseCase;
+        streamStats->mStartLatencyMs = mFirstRequestLatency;
+        streamStats->mHistogramType = hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
+        streamStats->mHistogramBins.assign(stats[mP010StreamId].mCaptureLatencyBins.begin(),
+                stats[mP010StreamId].mCaptureLatencyBins.end());
+        streamStats->mHistogramCounts.assign(stats[mP010StreamId].mCaptureLatencyHistogram.begin(),
+                stats[mP010StreamId].mCaptureLatencyHistogram.end());
+    }
+}
+
 }; // namespace camera3
 }; // namespace android
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
index 4b462b5..3dfed30 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.h
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -22,6 +22,7 @@
 #include "system/graphics-base-v1.1.h"
 
 #include "api1/client2/JpegProcessor.h"
+#include "utils/SessionStatsBuilder.h"
 
 #include "CompositeStream.h"
 
@@ -65,6 +66,9 @@
     static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
             const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
 
+    // Get composite stream stats
+    void getStreamStats(hardware::CameraStreamStats* streamStats) override;
+
 protected:
 
     bool threadLoop() override;
@@ -80,8 +84,10 @@
         bool                      errorNotified;
         int64_t                   frameNumber;
         int32_t                   requestId;
+        nsecs_t                   requestTimeNs;
 
-        InputFrame() : error(false), errorNotified(false), frameNumber(-1), requestId(-1) { }
+        InputFrame() : error(false), errorNotified(false), frameNumber(-1), requestId(-1),
+            requestTimeNs(-1) { }
     };
 
     status_t processInputFrame(nsecs_t ts, const InputFrame &inputFrame);
@@ -119,6 +125,8 @@
     bool                 mP010BufferAcquired, mBlobBufferAcquired;
     sp<Surface>          mP010Surface, mBlobSurface, mOutputSurface;
     int32_t              mOutputColorSpace;
+    int64_t              mOutputStreamUseCase;
+    nsecs_t              mFirstRequestLatency;
     sp<ProducerListener> mProducerListener;
 
     ssize_t              mMaxJpegBufferSize;
@@ -137,6 +145,8 @@
     std::unordered_map<int64_t, InputFrame> mPendingInputFrames;
 
     const CameraMetadata mStaticInfo;
+
+    SessionStatsBuilder  mSessionStatsBuilder;
 };
 
 }; //namespace camera3