Camera HAL3/API1: Add support for the partial result quirk.

- Camera2Client:
 - Detect partial result quirk
- Camera3Device:
 - Accumulate all partial results together
 - Fire off 3A-only result once all 3A states are available
- FrameProcessorBase:
 - Filter out partials; don't send them to listeners
- FrameProcessor (see the sketch after this list):
 - Skip face detect on partials
 - Make sure to only handle a 3A update for a given frame once
 - Trigger AF notifications when AF mode or trigger changes, to
   properly detect all AF transitions.
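
A condensed sketch of the per-frame handling this change adds to
FrameProcessor::processSingleFrame (simplified from the diff below; the
function prologue that obtains client and device is elided):

    bool partialResult = false;
    if (mUsePartialQuirk) {
        // The HAL tags each non-final piece of a frame's metadata as partial
        camera_metadata_entry_t entry =
                frame.find(ANDROID_QUIRKS_PARTIAL_RESULT);
        partialResult = (entry.count > 0 &&
                entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL);
    }

    // Face detect metadata is only complete in the final (non-partial) result
    if (!partialResult && processFaceDetect(frame, client) != OK) {
        return false;
    }

    // 3A notifications are synthesized at most once per frame number;
    // process3aState() skips frame numbers it has already handled
    if (mSynthesize3ANotify) {
        process3aState(frame, client);
    }

    return FrameProcessorBase::processSingleFrame(frame, device);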

Bug: 11115603
Change-Id: Iea8aa73c568701562a46071f7ea100624251d10b
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index df3b162..0b6ca5c 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -76,13 +76,15 @@
         return res;
     }
 
-    SharedParameters::Lock l(mParameters);
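+    // Limit the parameters lock to this block; the verbose-logging code
+    // later in this function takes its own lock.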
+    {
+        SharedParameters::Lock l(mParameters);
 
-    res = l.mParameters.initialize(&(mDevice->info()));
-    if (res != OK) {
-        ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
-                __FUNCTION__, mCameraId, strerror(-res), res);
-        return NO_INIT;
+        res = l.mParameters.initialize(&(mDevice->info()));
+        if (res != OK) {
+            ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
+                    __FUNCTION__, mCameraId, strerror(-res), res);
+            return NO_INIT;
+        }
     }
 
     String8 threadName;
@@ -135,6 +137,7 @@
     mCallbackProcessor->run(threadName.string());
 
     if (gLogLevel >= 1) {
+        SharedParameters::Lock l(mParameters);
         ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
               mCameraId);
         ALOGD("%s", l.mParameters.paramsFlattened.string());
@@ -353,6 +356,10 @@
         result.appendFormat("    meteringCropRegion\n");
         haveQuirk = true;
     }
+    if (p.quirks.partialResults) {
+        result.appendFormat("    usePartialResult\n");
+        haveQuirk = true;
+    }
     if (!haveQuirk) {
         result.appendFormat("    none\n");
     }
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index c34cb12..19acae4 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -29,13 +29,27 @@
 namespace camera2 {
 
 FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
-                               wp<Camera2Client> client) :
+                               sp<Camera2Client> client) :
     FrameProcessorBase(device),
     mClient(client),
-    mLastFrameNumberOfFaces(0) {
+    mLastFrameNumberOfFaces(0),
+    mLast3AFrameNumber(-1) {
 
     sp<CameraDeviceBase> d = device.promote();
     mSynthesize3ANotify = !(d->willNotify3A());
+
+    {
+        SharedParameters::Lock l(client->getParameters());
+        mUsePartialQuirk = l.mParameters.quirks.partialResults;
+
+        // Initialize starting 3A state
+        m3aState.afTriggerId = l.mParameters.afTriggerCounter;
+        m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
+        // Check if lens is fixed-focus
+        if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
+            m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
+        }
+    }
 }
 
 FrameProcessor::~FrameProcessor() {
@@ -49,20 +63,25 @@
         return false;
     }
 
-    if (processFaceDetect(frame, client) != OK) {
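+    // With the partial result quirk enabled, the HAL may deliver the metadata
+    // for one frame in several pieces; face detect output is only read from
+    // the final (non-partial) result.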
+    bool partialResult = false;
+    if (mUsePartialQuirk) {
+        camera_metadata_entry_t entry;
+        entry = frame.find(ANDROID_QUIRKS_PARTIAL_RESULT);
+        if (entry.count > 0 &&
+                entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
+            partialResult = true;
+        }
+    }
+
+    if (!partialResult && processFaceDetect(frame, client) != OK) {
         return false;
     }
 
     if (mSynthesize3ANotify) {
-        // Ignoring missing fields for now
         process3aState(frame, client);
     }
 
-    if (!FrameProcessorBase::processSingleFrame(frame, device)) {
-        return false;
-    }
-
-    return true;
+    return FrameProcessorBase::processSingleFrame(frame, device);
 }
 
 status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
@@ -198,86 +217,75 @@
 
     ATRACE_CALL();
     camera_metadata_ro_entry_t entry;
-    int mId = client->getCameraId();
+    int cameraId = client->getCameraId();
 
     entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
     int32_t frameNumber = entry.data.i32[0];
 
+    // Don't send 3A notifications for the same frame number twice
+    if (frameNumber <= mLast3AFrameNumber) {
+        ALOGV("%s: Already sent 3A for frame number %d, skipping",
+                __FUNCTION__, frameNumber);
+        return OK;
+    }
+
+    mLast3AFrameNumber = frameNumber;
+
     // Get 3A states from result metadata
     bool gotAllStates = true;
 
     AlgState new3aState;
 
-    entry = frame.find(ANDROID_CONTROL_AE_STATE);
-    if (entry.count == 0) {
-        ALOGE("%s: Camera %d: No AE state provided by HAL for frame %d!",
-                __FUNCTION__, mId, frameNumber);
-        gotAllStates = false;
-    } else {
-        new3aState.aeState =
-                static_cast<camera_metadata_enum_android_control_ae_state>(
-                    entry.data.u8[0]);
-    }
+    // TODO: Also use AE mode, AE trigger ID
 
-    entry = frame.find(ANDROID_CONTROL_AF_STATE);
-    if (entry.count == 0) {
-        ALOGE("%s: Camera %d: No AF state provided by HAL for frame %d!",
-                __FUNCTION__, mId, frameNumber);
-        gotAllStates = false;
-    } else {
-        new3aState.afState =
-                static_cast<camera_metadata_enum_android_control_af_state>(
-                    entry.data.u8[0]);
-    }
+    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AF_MODE,
+            &new3aState.afMode, frameNumber, cameraId);
 
-    entry = frame.find(ANDROID_CONTROL_AWB_STATE);
-    if (entry.count == 0) {
-        ALOGE("%s: Camera %d: No AWB state provided by HAL for frame %d!",
-                __FUNCTION__, mId, frameNumber);
-        gotAllStates = false;
-    } else {
-        new3aState.awbState =
-                static_cast<camera_metadata_enum_android_control_awb_state>(
-                    entry.data.u8[0]);
-    }
+    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AWB_MODE,
+            &new3aState.awbMode, frameNumber, cameraId);
 
-    int32_t afTriggerId = 0;
-    entry = frame.find(ANDROID_CONTROL_AF_TRIGGER_ID);
-    if (entry.count == 0) {
-        ALOGE("%s: Camera %d: No AF trigger ID provided by HAL for frame %d!",
-                __FUNCTION__, mId, frameNumber);
-        gotAllStates = false;
-    } else {
-        afTriggerId = entry.data.i32[0];
-    }
+    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AE_STATE,
+            &new3aState.aeState, frameNumber, cameraId);
 
-    int32_t aeTriggerId = 0;
-    entry = frame.find(ANDROID_CONTROL_AE_PRECAPTURE_ID);
-    if (entry.count == 0) {
-        ALOGE("%s: Camera %d: No AE precapture trigger ID provided by HAL"
-                " for frame %d!",
-                __FUNCTION__, mId, frameNumber);
-        gotAllStates = false;
-    } else {
-        aeTriggerId = entry.data.i32[0];
-    }
+    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AF_STATE,
+            &new3aState.afState, frameNumber, cameraId);
+
+    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AWB_STATE,
+            &new3aState.awbState, frameNumber, cameraId);
+
+    gotAllStates &= get3aResult<int32_t>(frame, ANDROID_CONTROL_AF_TRIGGER_ID,
+            &new3aState.afTriggerId, frameNumber, cameraId);
+
+    gotAllStates &= get3aResult<int32_t>(frame, ANDROID_CONTROL_AE_PRECAPTURE_ID,
+            &new3aState.aeTriggerId, frameNumber, cameraId);
 
     if (!gotAllStates) return BAD_VALUE;
 
     if (new3aState.aeState != m3aState.aeState) {
-        ALOGV("%s: AE state changed from 0x%x to 0x%x",
-                __FUNCTION__, m3aState.aeState, new3aState.aeState);
-        client->notifyAutoExposure(new3aState.aeState, aeTriggerId);
+        ALOGV("%s: Camera %d: AE state %d->%d",
+                __FUNCTION__, cameraId,
+                m3aState.aeState, new3aState.aeState);
+        client->notifyAutoExposure(new3aState.aeState, new3aState.aeTriggerId);
     }
-    if (new3aState.afState != m3aState.afState) {
-        ALOGV("%s: AF state changed from 0x%x to 0x%x",
-                __FUNCTION__, m3aState.afState, new3aState.afState);
-        client->notifyAutoFocus(new3aState.afState, afTriggerId);
+
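+    // Also fire when the AF mode or trigger ID changes, so AF transitions
+    // that leave the AF state value unchanged are still reported.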
+    if (new3aState.afState != m3aState.afState ||
+        new3aState.afMode != m3aState.afMode ||
+        new3aState.afTriggerId != m3aState.afTriggerId) {
+        ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
+                __FUNCTION__, cameraId,
+                m3aState.afState, new3aState.afState,
+                m3aState.afMode, new3aState.afMode,
+                m3aState.afTriggerId, new3aState.afTriggerId);
+        client->notifyAutoFocus(new3aState.afState, new3aState.afTriggerId);
     }
-    if (new3aState.awbState != m3aState.awbState) {
-        ALOGV("%s: AWB state changed from 0x%x to 0x%x",
-                __FUNCTION__, m3aState.awbState, new3aState.awbState);
-        client->notifyAutoWhitebalance(new3aState.awbState, aeTriggerId);
+    if (new3aState.awbState != m3aState.awbState ||
+        new3aState.awbMode != m3aState.awbMode) {
+        ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
+                __FUNCTION__, cameraId,
+                m3aState.awbState, new3aState.awbState,
+                m3aState.awbMode, new3aState.awbMode);
+        client->notifyAutoWhitebalance(new3aState.awbState,
+                new3aState.aeTriggerId);
     }
 
     m3aState = new3aState;
@@ -285,6 +293,39 @@
     return OK;
 }
 
+template<typename Src, typename T>
+bool FrameProcessor::get3aResult(const CameraMetadata& result, int32_t tag,
+        T* value, int32_t frameNumber, int cameraId) {
+    camera_metadata_ro_entry_t entry;
+    if (value == NULL) {
+        ALOGE("%s: Camera %d: Value to write to is NULL",
+                __FUNCTION__, cameraId);
+        return false;
+    }
+
+    entry = result.find(tag);
+    if (entry.count == 0) {
+        ALOGE("%s: Camera %d: No %s provided by HAL for frame %d!",
+                __FUNCTION__, cameraId,
+                get_camera_metadata_tag_name(tag), frameNumber);
+        return false;
+    } else {
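+        // camera_metadata stores these tags as either u8 (modes and states)
+        // or i32 (trigger IDs); pick the source array by the source type size.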
+        switch (sizeof(Src)) {
+            case sizeof(uint8_t):
+                *value = static_cast<T>(entry.data.u8[0]);
+                break;
+            case sizeof(int32_t):
+                *value = static_cast<T>(entry.data.i32[0]);
+                break;
+            default:
+                ALOGE("%s: Camera %d: Unsupported source",
+                        __FUNCTION__, cameraId);
+                return false;
+        }
+    }
+    return true;
+}
+
 
 void FrameProcessor::callbackFaceDetection(sp<Camera2Client> client,
                                      const camera_frame_metadata &metadata) {
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index 2a17d45..856ad32 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -39,7 +39,7 @@
  */
 class FrameProcessor : public FrameProcessorBase {
   public:
-    FrameProcessor(wp<CameraDeviceBase> device, wp<Camera2Client> client);
+    FrameProcessor(wp<CameraDeviceBase> device, sp<Camera2Client> client);
     ~FrameProcessor();
 
   private:
@@ -61,18 +61,44 @@
     status_t process3aState(const CameraMetadata &frame,
             const sp<Camera2Client> &client);
 
+    // Helper for process3aState: reads a single 3A tag from the result
+    // metadata into *value, returning false if the tag is missing.
+    template<typename Src, typename T>
+    bool get3aResult(const CameraMetadata& result, int32_t tag, T* value,
+            int32_t frameNumber, int cameraId);
+
+
     struct AlgState {
+        // TODO: also track AE mode
+        camera_metadata_enum_android_control_af_mode   afMode;
+        camera_metadata_enum_android_control_awb_mode  awbMode;
+
         camera_metadata_enum_android_control_ae_state  aeState;
         camera_metadata_enum_android_control_af_state  afState;
         camera_metadata_enum_android_control_awb_state awbState;
 
+        int32_t                                        afTriggerId;
+        int32_t                                        aeTriggerId;
+
+        // These defaults need to match those in Parameters.cpp
         AlgState() :
+                afMode(ANDROID_CONTROL_AF_MODE_AUTO),
+                awbMode(ANDROID_CONTROL_AWB_MODE_AUTO),
                 aeState(ANDROID_CONTROL_AE_STATE_INACTIVE),
                 afState(ANDROID_CONTROL_AF_STATE_INACTIVE),
-                awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE) {
+                awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE),
+                afTriggerId(0),
+                aeTriggerId(0) {
         }
     } m3aState;
 
+    // Whether the partial result quirk is enabled for this device
+    bool mUsePartialQuirk;
+
+    // Most recent frame number for which 3A notifications were sent.
+    // Used to avoid sending 3A notifications for the same frame more
+    // than once.
+    int32_t mLast3AFrameNumber;
+
     // Emit FaceDetection event to java if faces changed
     void callbackFaceDetection(sp<Camera2Client> client,
                                const camera_frame_metadata &metadata);
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 8a4e75c..1e425ba 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -1047,6 +1047,11 @@
     ALOGV_IF(quirks.meteringCropRegion, "Camera %d: Quirk meteringCropRegion"
                 " enabled", cameraId);
 
+    entry = info->find(ANDROID_QUIRKS_USE_PARTIAL_RESULT);
+    quirks.partialResults = (entry.count != 0 && entry.data.u8[0] == 1);
+    ALOGV_IF(quirks.partialResults, "Camera %d: Quirk usePartialResult"
+                " enabled", cameraId);
+
     return OK;
 }
 
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index bcbdb99..93ab113 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -209,6 +209,7 @@
         bool triggerAfWithAuto;
         bool useZslFormat;
         bool meteringCropRegion;
+        bool partialResults;
     } quirks;
 
     /**