Substantial cleanup of camera2 HAL, and some initial unit tests

- Clean up const/struct usage in HAL
- Add user pointer to notify callback
- Revamp allocate_stream; it now picks its own stream ID (see the sketch after this list).
- Much simpler stream interface
- Merged request/reprocess input queues
- Frame queue interface no longer a mirror of request queue
- Added triggers/notifications
- Added default request creation
- Lots of comments
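
The revamped allocate_stream returns the stream ID it picked, along with the
final format, usage flags, and maximum buffer count, through output
parameters. A rough sketch of the call as the tests' StreamAdapter makes it
(variable names are illustrative):

    uint32_t id, formatActual, usage, maxBuffers = 2;
    int res = device->ops->allocate_stream(device,
            width, height, HAL_PIXEL_FORMAT_RAW_SENSOR, streamOps,
            &id, &formatActual, &usage, &maxBuffers);
    // On success the HAL has chosen 'id'; gralloc buffers are then
    // registered via device->ops->register_stream_buffers().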

Unit tests added:

- Lots of utility code to speed up test writing
- Basic open/close
- Capturing 1 raw buffer (request input, frame output, buffer output), as sketched below
- Capturing a burst of 10 raw buffers
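
The raw capture tests drive the HAL through roughly the following sequence,
using the utility classes added here (a sketch only, with names as in the
test fixture; error checking omitted):

    camera_metadata_t *request = allocate_camera_metadata(20, 2000);
    // ... add ANDROID_REQUEST_OUTPUT_STREAMS, exposure time, sensitivity ...
    mRequests.enqueue(request);        // HAL pulls this via the request queue
    mFrames.waitForBuffer(1 * SEC);    // block until output metadata arrives
    camera_metadata_t *frame;
    mFrames.dequeue(&frame);
    rawWaiter->waitForFrame(1 * SEC);  // block until the raw image arrives
    CpuConsumer::LockedBuffer buffer;
    rawConsumer->lockNextBuffer(&buffer);
    // ... inspect buffer.data ...
    rawConsumer->unlockBuffer(buffer);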

Bug: 6243944
Change-Id: I490bd5df81079a44c43d87b02c9a7f7ca251f531
diff --git a/tests/camera2/Android.mk b/tests/camera2/Android.mk
index 340ec30..325e82d 100644
--- a/tests/camera2/Android.mk
+++ b/tests/camera2/Android.mk
@@ -2,13 +2,15 @@
 include $(CLEAR_VARS)
 
 LOCAL_SRC_FILES:= \
-	camera2.cpp
+	camera2.cpp \
+	camera2_utils.cpp
 
 LOCAL_SHARED_LIBRARIES := \
 	libutils \
 	libstlport \
 	libhardware \
-	libcamera_metadata
+	libcamera_metadata \
+	libgui
 
 LOCAL_STATIC_LIBRARIES := \
 	libgtest \
@@ -21,7 +23,7 @@
 	external/stlport/stlport \
 	system/media/camera/include \
 
-LOCAL_MODULE:= camera2_hal_tests
+LOCAL_MODULE:= camera2_test
 LOCAL_MODULE_TAGS := tests
 
 include $(BUILD_EXECUTABLE)
diff --git a/tests/camera2/camera2.cpp b/tests/camera2/camera2.cpp
index d13d7cd..50f0b06 100644
--- a/tests/camera2/camera2.cpp
+++ b/tests/camera2/camera2.cpp
@@ -14,10 +14,21 @@
  * limitations under the License.
  */
 
-#include <system/camera_metadata.h>
-#include <hardware/camera2.h>
+#define LOG_TAG "Camera2_test"
+#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
 #include <gtest/gtest.h>
 #include <iostream>
+#include <fstream>
+
+#include <utils/Vector.h>
+#include <gui/CpuConsumer.h>
+#include <system/camera_metadata.h>
+
+#include "camera2_utils.h"
+
+namespace android {
 
 class Camera2Test: public testing::Test {
   public:
@@ -33,12 +44,16 @@
         ASSERT_TRUE(NULL != module)
                 << "No camera module was set by hw_get_module";
 
-        std::cout << "  Camera module name: " << module->name << std::endl;
-        std::cout << "  Camera module author: " << module->author << std::endl;
-        std::cout << "  Camera module API version: 0x" << std::hex
-                  << module->module_api_version << std::endl;
-        std::cout << "  Camera module HAL API version: 0x" << std::hex
-                  << module->hal_api_version << std::endl;
+        IF_ALOGV() {
+            std::cout << "  Camera module name: "
+                    << module->name << std::endl;
+            std::cout << "  Camera module author: "
+                    << module->author << std::endl;
+            std::cout << "  Camera module API version: 0x" << std::hex
+                    << module->module_api_version << std::endl;
+            std::cout << "  Camera module HAL API version: 0x" << std::hex
+                    << module->hal_api_version << std::endl;
+        }
 
         int16_t version2_0 = CAMERA_MODULE_API_VERSION_2_0;
         ASSERT_EQ(version2_0, module->module_api_version)
@@ -52,7 +67,10 @@
         sNumCameras = sCameraModule->get_number_of_cameras();
         ASSERT_LT(0, sNumCameras) << "No camera devices available!";
 
-        std::cout << "  Camera device count: " << sNumCameras << std::endl;
+        IF_ALOGV() {
+            std::cout << "  Camera device count: " << sNumCameras << std::endl;
+        }
+
         sCameraSupportsHal2 = new bool[sNumCameras];
 
         for (int i = 0; i < sNumCameras; i++) {
@@ -60,19 +78,24 @@
             res = sCameraModule->get_camera_info(i, &info);
             ASSERT_EQ(0, res)
                     << "Failure getting camera info for camera " << i;
-            std::cout << "  Camera device: " << std::dec
-                      << i << std::endl;;
-            std::cout << "    Facing: " << std::dec
-                      << info.facing  << std::endl;
-            std::cout << "    Orientation: " << std::dec
-                      << info.orientation  << std::endl;
-            std::cout << "    Version: 0x" << std::hex <<
-                    info.device_version  << std::endl;
+            IF_ALOGV() {
+                std::cout << "  Camera device: " << std::dec
+                          << i << std::endl;
+                std::cout << "    Facing: " << std::dec
+                          << info.facing  << std::endl;
+                std::cout << "    Orientation: " << std::dec
+                          << info.orientation  << std::endl;
+                std::cout << "    Version: 0x" << std::hex <<
+                        info.device_version  << std::endl;
+            }
             if (info.device_version >= CAMERA_DEVICE_API_VERSION_2_0) {
                 sCameraSupportsHal2[i] = true;
                 ASSERT_TRUE(NULL != info.static_camera_characteristics);
-                std::cout << "    Static camera metadata:"  << std::endl;
-                dump_camera_metadata(info.static_camera_characteristics, 0, 1);
+                IF_ALOGV() {
+                    std::cout << "    Static camera metadata:"  << std::endl;
+                    dump_camera_metadata(info.static_camera_characteristics,
+                            0, 1);
+                }
             } else {
                 sCameraSupportsHal2[i] = false;
             }
@@ -83,13 +106,26 @@
         return sCameraModule;
     }
 
-    static const camera2_device_t *openCameraDevice(int id) {
+    static int getNumCameras() {
+        return sNumCameras;
+    }
+
+    static bool isHal2Supported(int id) {
+        return sCameraSupportsHal2[id];
+    }
+
+    static camera2_device_t *openCameraDevice(int id) {
+        ALOGV("Opening camera %d", id);
         if (NULL == sCameraSupportsHal2) return NULL;
         if (id >= sNumCameras) return NULL;
         if (!sCameraSupportsHal2[id]) return NULL;
 
         hw_device_t *device = NULL;
         const camera_module_t *cam_module = getCameraModule();
+        if (cam_module == NULL) {
+            return NULL;
+        }
+
         char camId[10];
         int res;
 
@@ -98,7 +134,7 @@
             (const hw_module_t*)cam_module,
             camId,
             &device);
-        if (res < 0 || cam_module == NULL) {
+        if (res != NO_ERROR || device == NULL) {
             return NULL;
         }
         camera2_device_t *cam_device =
@@ -106,18 +142,439 @@
         return cam_device;
     }
 
-  private:
+    static status_t configureCameraDevice(camera2_device_t *dev,
+            MetadataQueue &requestQueue,
+            MetadataQueue  &frameQueue,
+            NotifierListener &listener) {
 
+        status_t err;
+
+        err = dev->ops->set_request_queue_src_ops(dev,
+                requestQueue.getToConsumerInterface());
+        if (err != OK) return err;
+
+        requestQueue.setFromConsumerInterface(dev);
+
+        err = dev->ops->set_frame_queue_dst_ops(dev,
+                frameQueue.getToProducerInterface());
+        if (err != OK) return err;
+
+        err = listener.getNotificationsFrom(dev);
+        if (err != OK) return err;
+
+        vendor_tag_query_ops_t *vendor_metadata_tag_ops;
+        err = dev->ops->get_metadata_vendor_tag_ops(dev, &vendor_metadata_tag_ops);
+        if (err != OK) return err;
+
+        err = set_camera_metadata_vendor_tag_ops(vendor_metadata_tag_ops);
+        if (err != OK) return err;
+
+        return OK;
+    }
+
+    static status_t closeCameraDevice(camera2_device_t *cam_dev) {
+        int res;
+        ALOGV("Closing camera %p", cam_dev);
+
+        hw_device_t *dev = reinterpret_cast<hw_device_t *>(cam_dev);
+        res = dev->close(dev);
+        return res;
+    }
+
+    void setUpCamera(int id) {
+        ASSERT_GT(sNumCameras, id);
+        status_t res;
+
+        if (mDevice != NULL) {
+            closeCameraDevice(mDevice);
+        }
+        mDevice = openCameraDevice(id);
+        ASSERT_TRUE(NULL != mDevice) << "Failed to open camera device";
+
+        camera_info info;
+        res = sCameraModule->get_camera_info(id, &info);
+        ASSERT_EQ(OK, res);
+
+        mStaticInfo = info.static_camera_characteristics;
+
+        res = configureCameraDevice(mDevice,
+                mRequests,
+                mFrames,
+                mNotifications);
+        ASSERT_EQ(OK, res) << "Failure to configure camera device";
+
+    }
+
+    void setUpStream(sp<ISurfaceTexture> consumer,
+            int width, int height, int format, int *id) {
+        status_t res;
+
+        StreamAdapter* stream = new StreamAdapter(consumer);
+
+        ALOGV("Creating stream, format 0x%x, %d x %d", format, width, height);
+        res = stream->connectToDevice(mDevice, width, height, format);
+        ASSERT_EQ(NO_ERROR, res) << "Failed to connect to stream: "
+                                 << strerror(-res);
+        mStreams.push_back(stream);
+
+        *id = stream->getId();
+    }
+
+    void disconnectStream(int id) {
+        status_t res;
+        unsigned int i=0;
+        for (; i < mStreams.size(); i++) {
+            if (mStreams[i]->getId() == id) {
+                res = mStreams[i]->disconnect();
+                ASSERT_EQ(NO_ERROR, res) <<
+                        "Failed to disconnect stream " << id;
+                break;
+            }
+        }
+        ASSERT_GT(mStreams.size(), i) << "Stream id not found: " << id;
+    }
+
+    void getResolutionList(uint32_t format,
+            uint32_t **list,
+            size_t *count) {
+
+        uint32_t *availableFormats;
+        size_t   availableFormatsCount;
+        status_t res;
+        res = find_camera_metadata_entry(mStaticInfo,
+                ANDROID_SCALER_AVAILABLE_FORMATS,
+                NULL,
+                (void**)&availableFormats,
+                &availableFormatsCount);
+        ASSERT_EQ(OK, res);
+
+        uint32_t formatIdx;
+        for (formatIdx=0; formatIdx < availableFormatsCount; formatIdx++) {
+            if (availableFormats[formatIdx] == format) break;
+        }
+        ASSERT_NE(availableFormatsCount, formatIdx)
+                << "No support found for format 0x" << std::hex << format;
+
+        uint32_t *availableSizesPerFormat;
+        size_t    availableSizesPerFormatCount;
+        res = find_camera_metadata_entry(mStaticInfo,
+                ANDROID_SCALER_AVAILABLE_SIZES_PER_FORMAT,
+                NULL,
+                (void**)&availableSizesPerFormat,
+                &availableSizesPerFormatCount);
+        ASSERT_EQ(OK, res);
+
+        int size_offset = 0;
+        for (unsigned int i=0; i < formatIdx; i++) {
+            size_offset += availableSizesPerFormat[i];
+        }
+
+        uint32_t *availableSizes;
+        size_t    availableSizesCount;
+        res = find_camera_metadata_entry(mStaticInfo,
+                ANDROID_SCALER_AVAILABLE_SIZES,
+                NULL,
+                (void**)&availableSizes,
+                &availableSizesCount);
+        ASSERT_EQ(OK, res);
+
+        *list = availableSizes + size_offset;
+        *count = availableSizesPerFormat[formatIdx];
+    }
+
+    virtual void SetUp() {
+        const ::testing::TestInfo* const testInfo =
+                ::testing::UnitTest::GetInstance()->current_test_info();
+
+        ALOGV("*** Starting test %s in test case %s", testInfo->name(), testInfo->test_case_name());
+        mDevice = NULL;
+    }
+
+    virtual void TearDown() {
+        for (unsigned int i = 0; i < mStreams.size(); i++) {
+            delete mStreams[i];
+        }
+        if (mDevice != NULL) {
+            closeCameraDevice(mDevice);
+        }
+    }
+
+    camera2_device    *mDevice;
+    camera_metadata_t *mStaticInfo;
+
+    MetadataQueue    mRequests;
+    MetadataQueue    mFrames;
+    NotifierListener mNotifications;
+
+    Vector<StreamAdapter*> mStreams;
+
+  private:
     static camera_module_t *sCameraModule;
-    static int sNumCameras;
-    static bool *sCameraSupportsHal2;
+    static int              sNumCameras;
+    static bool            *sCameraSupportsHal2;
 };
 
 camera_module_t *Camera2Test::sCameraModule = NULL;
-int Camera2Test::sNumCameras = 0;
-bool *Camera2Test::sCameraSupportsHal2 = NULL;
+bool *Camera2Test::sCameraSupportsHal2      = NULL;
+int Camera2Test::sNumCameras                = 0;
+
+static const nsecs_t USEC = 1000;
+static const nsecs_t MSEC = 1000*USEC;
+static const nsecs_t SEC = 1000*MSEC;
 
 
-TEST_F(Camera2Test, Basic) {
-    ASSERT_TRUE(NULL != getCameraModule());
+TEST_F(Camera2Test, OpenClose) {
+    status_t res;
+
+    for (int id = 0; id < getNumCameras(); id++) {
+        if (!isHal2Supported(id)) continue;
+
+        camera2_device_t *d = openCameraDevice(id);
+        ASSERT_TRUE(NULL != d) << "Failed to open camera device";
+
+        res = closeCameraDevice(d);
+        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
+    }
 }
+
+TEST_F(Camera2Test, Capture1Raw) {
+    status_t res;
+
+    for (int id = 0; id < getNumCameras(); id++) {
+        if (!isHal2Supported(id)) continue;
+
+        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));
+
+        sp<CpuConsumer> rawConsumer = new CpuConsumer(1);
+        sp<FrameWaiter> rawWaiter = new FrameWaiter();
+        rawConsumer->setFrameAvailableListener(rawWaiter);
+
+        uint32_t *rawResolutions;
+        size_t    rawResolutionsCount;
+
+        int format = HAL_PIXEL_FORMAT_RAW_SENSOR;
+
+        getResolutionList(format,
+                &rawResolutions, &rawResolutionsCount);
+        ASSERT_LT((uint32_t)0, rawResolutionsCount);
+
+        // Pick first available raw resolution
+        int width = rawResolutions[0];
+        int height = rawResolutions[1];
+
+        int streamId;
+        ASSERT_NO_FATAL_FAILURE(
+            setUpStream(rawConsumer->getProducerInterface(),
+                    width, height, format, &streamId) );
+
+        camera_metadata_t *request;
+        request = allocate_camera_metadata(20, 2000);
+
+        uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
+        add_camera_metadata_entry(request,
+                ANDROID_REQUEST_METADATA_MODE,
+                (void**)&metadataMode, 1);
+        uint32_t outputStreams = streamId;
+        add_camera_metadata_entry(request,
+                ANDROID_REQUEST_OUTPUT_STREAMS,
+                (void**)&outputStreams, 1);
+
+        uint64_t exposureTime = 2*MSEC;
+        add_camera_metadata_entry(request,
+                ANDROID_SENSOR_EXPOSURE_TIME,
+                (void**)&exposureTime, 1);
+        uint64_t frameDuration = 30*MSEC;
+        add_camera_metadata_entry(request,
+                ANDROID_SENSOR_FRAME_DURATION,
+                (void**)&frameDuration, 1);
+        uint32_t sensitivity = 100;
+        add_camera_metadata_entry(request,
+                ANDROID_SENSOR_SENSITIVITY,
+                (void**)&sensitivity, 1);
+
+        uint32_t hourOfDay = 12;
+        add_camera_metadata_entry(request,
+                0x80000000, // EMULATOR_HOUROFDAY
+                &hourOfDay, 1);
+
+        IF_ALOGV() {
+            std::cout << "Input request: " << std::endl;
+            dump_camera_metadata(request, 0, 1);
+        }
+
+        res = mRequests.enqueue(request);
+        ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res);
+
+        res = mFrames.waitForBuffer(exposureTime + SEC);
+        ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);
+
+        camera_metadata_t *frame;
+        res = mFrames.dequeue(&frame);
+        ASSERT_EQ(NO_ERROR, res);
+        ASSERT_TRUE(frame != NULL);
+
+        IF_ALOGV() {
+            std::cout << "Output frame:" << std::endl;
+            dump_camera_metadata(frame, 0, 1);
+        }
+
+        res = rawWaiter->waitForFrame(exposureTime + SEC);
+        ASSERT_EQ(NO_ERROR, res);
+
+        CpuConsumer::LockedBuffer buffer;
+        res = rawConsumer->lockNextBuffer(&buffer);
+        ASSERT_EQ(NO_ERROR, res);
+
+        IF_ALOGV() {
+            const char *dumpname =
+                    "/data/local/tmp/camera2_test-capture1raw-dump.raw";
+            ALOGV("Dumping raw buffer to %s", dumpname);
+            // Write to file
+            std::ofstream rawFile(dumpname);
+            for (unsigned int y = 0; y < buffer.height; y++) {
+                rawFile.write((const char *)(buffer.data + y * buffer.stride * 2),
+                        buffer.width * 2);
+            }
+            rawFile.close();
+        }
+
+        res = rawConsumer->unlockBuffer(buffer);
+        ASSERT_EQ(NO_ERROR, res);
+
+        ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId));
+
+        res = closeCameraDevice(mDevice);
+        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
+
+    }
+}
+
+TEST_F(Camera2Test, CaptureBurstRaw) {
+    status_t res;
+
+    for (int id = 0; id < getNumCameras(); id++) {
+        if (!isHal2Supported(id)) continue;
+
+        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));
+
+        sp<CpuConsumer> rawConsumer = new CpuConsumer(1);
+        sp<FrameWaiter> rawWaiter = new FrameWaiter();
+        rawConsumer->setFrameAvailableListener(rawWaiter);
+
+        uint32_t *rawResolutions;
+        size_t    rawResolutionsCount;
+
+        int format = HAL_PIXEL_FORMAT_RAW_SENSOR;
+
+        getResolutionList(format,
+                &rawResolutions, &rawResolutionsCount);
+        ASSERT_LT((uint32_t)0, rawResolutionsCount);
+
+        // Pick first available raw resolution
+        int width = rawResolutions[0];
+        int height = rawResolutions[1];
+
+        int streamId;
+        ASSERT_NO_FATAL_FAILURE(
+            setUpStream(rawConsumer->getProducerInterface(),
+                    width, height, format, &streamId) );
+
+        camera_metadata_t *request;
+        request = allocate_camera_metadata(20, 2000);
+
+        uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
+        add_camera_metadata_entry(request,
+                ANDROID_REQUEST_METADATA_MODE,
+                (void**)&metadataMode, 1);
+        uint32_t outputStreams = streamId;
+        add_camera_metadata_entry(request,
+                ANDROID_REQUEST_OUTPUT_STREAMS,
+                (void**)&outputStreams, 1);
+
+        uint64_t frameDuration = 30*MSEC;
+        add_camera_metadata_entry(request,
+                ANDROID_SENSOR_FRAME_DURATION,
+                (void**)&frameDuration, 1);
+        uint32_t sensitivity = 100;
+        add_camera_metadata_entry(request,
+                ANDROID_SENSOR_SENSITIVITY,
+                (void**)&sensitivity, 1);
+
+        uint32_t hourOfDay = 12;
+        add_camera_metadata_entry(request,
+                0x80000000, // EMULATOR_HOUROFDAY
+                &hourOfDay, 1);
+
+        IF_ALOGV() {
+            std::cout << "Input request template: " << std::endl;
+            dump_camera_metadata(request, 0, 1);
+        }
+
+        int numCaptures = 10;
+
+        // Enqueue numCaptures requests with increasing exposure time
+
+        uint64_t exposureTime = 1 * MSEC;
+        for (int reqCount = 0; reqCount < numCaptures; reqCount++ ) {
+            camera_metadata_t *req;
+            req = allocate_camera_metadata(20, 2000);
+            append_camera_metadata(req, request);
+
+            add_camera_metadata_entry(req,
+                    ANDROID_SENSOR_EXPOSURE_TIME,
+                    (void**)&exposureTime, 1);
+            exposureTime *= 2;
+
+            res = mRequests.enqueue(req);
+            ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: "
+                    << strerror(-res);
+        }
+
+        // Get frames and image buffers one by one
+        for (int frameCount = 0; frameCount < numCaptures; frameCount++) {
+            res = mFrames.waitForBuffer(SEC);
+            ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);
+
+            camera_metadata_t *frame;
+            res = mFrames.dequeue(&frame);
+            ASSERT_EQ(NO_ERROR, res);
+            ASSERT_TRUE(frame != NULL);
+
+            uint32_t *frameNumber;
+            res = find_camera_metadata_entry(frame,
+                    ANDROID_REQUEST_FRAME_COUNT,
+                    NULL, (void**)&frameNumber, NULL);
+            ASSERT_EQ(NO_ERROR, res);
+            ASSERT_EQ(frameCount, *frameNumber);
+
+            res = rawWaiter->waitForFrame(SEC);
+            ASSERT_EQ(NO_ERROR, res) <<
+                    "Never got raw data for capture " << frameCount;
+
+            CpuConsumer::LockedBuffer buffer;
+            res = rawConsumer->lockNextBuffer(&buffer);
+            ASSERT_EQ(NO_ERROR, res);
+
+            IF_ALOGV() {
+                char dumpname[60];
+                snprintf(dumpname, 60,
+                        "/data/local/tmp/camera2_test-captureburst-dump_%d.raw",
+                        frameCount);
+                ALOGV("Dumping raw buffer to %s", dumpname);
+                // Write to file
+                std::ofstream rawFile(dumpname);
+                for (unsigned int y = 0; y < buffer.height; y++) {
+                    rawFile.write(
+                            (const char *)(buffer.data + y * buffer.stride * 2),
+                            buffer.width * 2);
+                }
+                rawFile.close();
+            }
+
+            res = rawConsumer->unlockBuffer(buffer);
+            ASSERT_EQ(NO_ERROR, res);
+        }
+    }
+}
+
+} // namespace android
diff --git a/tests/camera2/camera2_utils.cpp b/tests/camera2/camera2_utils.cpp
new file mode 100644
index 0000000..bd56644
--- /dev/null
+++ b/tests/camera2/camera2_utils.cpp
@@ -0,0 +1,583 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Utility classes for camera2 HAL testing
+
+#define LOG_TAG "Camera2_test_utils"
+#define LOG_NDEBUG 0
+
+#include "utils/Log.h"
+#include "camera2_utils.h"
+
+namespace android {
+
+/**
+ * MetadataQueue
+ */
+
+MetadataQueue::MetadataQueue():
+            mDevice(NULL),
+            mFrameCount(0),
+            mCount(0),
+            mStreamSlotCount(0),
+            mSignalConsumer(true)
+{
+    camera2_request_queue_src_ops::dequeue_request = consumer_dequeue;
+    camera2_request_queue_src_ops::request_count = consumer_buffer_count;
+    camera2_request_queue_src_ops::free_request = consumer_free;
+
+    camera2_frame_queue_dst_ops::dequeue_frame = producer_dequeue;
+    camera2_frame_queue_dst_ops::cancel_frame = producer_cancel;
+    camera2_frame_queue_dst_ops::enqueue_frame = producer_enqueue;
+}
+
+MetadataQueue::~MetadataQueue() {
+    freeBuffers(mEntries, mEntries.begin(), mEntries.end());
+    freeBuffers(mStreamSlot, mStreamSlot.begin(), mStreamSlot.end());
+}
+
+// Interface to camera2 HAL as consumer (input requests/reprocessing)
+camera2_request_queue_src_ops_t* MetadataQueue::getToConsumerInterface() {
+    return static_cast<camera2_request_queue_src_ops_t*>(this);
+}
+
+void MetadataQueue::setFromConsumerInterface(camera2_device_t *d) {
+    mDevice = d;
+}
+
+camera2_frame_queue_dst_ops_t* MetadataQueue::getToProducerInterface() {
+    return static_cast<camera2_frame_queue_dst_ops_t*>(this);
+}
+
+// Real interfaces
+status_t MetadataQueue::enqueue(camera_metadata_t *buf) {
+    Mutex::Autolock l(mMutex);
+
+    mCount++;
+    mEntries.push_back(buf);
+    notEmpty.signal();
+
+    if (mSignalConsumer && mDevice != NULL) {
+        mSignalConsumer = false;
+
+        mMutex.unlock();
+        ALOGV("%s: Signaling consumer", __FUNCTION__);
+        mDevice->ops->notify_request_queue_not_empty(mDevice);
+        mMutex.lock();
+    }
+    return OK;
+}
+
+int MetadataQueue::getBufferCount() {
+    Mutex::Autolock l(mMutex);
+    if (mStreamSlotCount > 0) {
+        return CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS;
+    }
+    return mCount;
+}
+
+status_t MetadataQueue::dequeue(camera_metadata_t **buf, bool incrementCount) {
+    Mutex::Autolock l(mMutex);
+
+    if (mCount == 0) {
+        if (mStreamSlotCount == 0) {
+            ALOGV("%s: Empty", __FUNCTION__);
+            *buf = NULL;
+            mSignalConsumer = true;
+            return OK;
+        }
+        ALOGV("%s: Streaming %d frames to queue", __FUNCTION__,
+              mStreamSlotCount);
+
+        for (List<camera_metadata_t*>::iterator slotEntry = mStreamSlot.begin();
+                slotEntry != mStreamSlot.end();
+                slotEntry++ ) {
+            size_t entries = get_camera_metadata_entry_count(*slotEntry);
+            size_t dataBytes = get_camera_metadata_data_count(*slotEntry);
+
+            camera_metadata_t *copy = allocate_camera_metadata(entries, dataBytes);
+            append_camera_metadata(copy, *slotEntry);
+            mEntries.push_back(copy);
+        }
+        mCount = mStreamSlotCount;
+    }
+    ALOGV("MetadataQueue: deque (%d buffers)", mCount);
+    camera_metadata_t *b = *(mEntries.begin());
+    mEntries.erase(mEntries.begin());
+
+    if (incrementCount) {
+        add_camera_metadata_entry(b,
+                ANDROID_REQUEST_FRAME_COUNT,
+                (void**)&mFrameCount, 1);
+        mFrameCount++;
+    }
+
+    *buf = b;
+    mCount--;
+
+    return OK;
+}
+
+status_t MetadataQueue::waitForBuffer(nsecs_t timeout) {
+    Mutex::Autolock l(mMutex);
+    status_t res;
+    while (mCount == 0) {
+        res = notEmpty.waitRelative(mMutex,timeout);
+        if (res != OK) return res;
+    }
+    return OK;
+}
+
+status_t MetadataQueue::setStreamSlot(camera_metadata_t *buf) {
+    if (buf == NULL) {
+        freeBuffers(mStreamSlot, mStreamSlot.begin(), mStreamSlot.end());
+        mStreamSlotCount = 0;
+        return OK;
+    }
+    if (mStreamSlotCount > 1) {
+        // Drop all entries but the first; the remaining slot is replaced below
+        freeBuffers(mStreamSlot, ++mStreamSlot.begin(), mStreamSlot.end());
+        mStreamSlotCount = 1;
+    }
+    if (mStreamSlotCount == 1) {
+        free_camera_metadata( *(mStreamSlot.begin()) );
+        *(mStreamSlot.begin()) = buf;
+    } else {
+        mStreamSlot.push_front(buf);
+        mStreamSlotCount = 1;
+    }
+    return OK;
+}
+
+status_t MetadataQueue::setStreamSlot(const List<camera_metadata_t*> &bufs) {
+    if (mStreamSlotCount > 0) {
+        freeBuffers(mStreamSlot, mStreamSlot.begin(), mStreamSlot.end());
+    }
+    mStreamSlot = bufs;
+    mStreamSlotCount = mStreamSlot.size();
+
+    return OK;
+}
+
+status_t MetadataQueue::freeBuffers(List<camera_metadata_t*> &list,
+                                    List<camera_metadata_t*>::iterator start,
+                                    List<camera_metadata_t*>::iterator end) {
+    while (start != end) {
+        free_camera_metadata(*start);
+        // Erase from the list that owns the iterators (mEntries or mStreamSlot)
+        start = list.erase(start);
+    }
+    return OK;
+}
+
+int MetadataQueue::consumer_buffer_count(
+        camera2_request_queue_src_ops_t *q) {
+    MetadataQueue *queue = static_cast<MetadataQueue *>(q);
+    return queue->getBufferCount();
+}
+
+int MetadataQueue::consumer_dequeue(camera2_request_queue_src_ops_t *q,
+        camera_metadata_t **buffer) {
+    MetadataQueue *queue = static_cast<MetadataQueue *>(q);
+    return queue->dequeue(buffer, true);
+}
+
+int MetadataQueue::consumer_free(camera2_request_queue_src_ops_t *q,
+        camera_metadata_t *old_buffer) {
+    MetadataQueue *queue = static_cast<MetadataQueue *>(q);
+    free_camera_metadata(old_buffer);
+    return OK;
+}
+
+int MetadataQueue::producer_dequeue(camera2_frame_queue_dst_ops_t *q,
+        size_t entries, size_t bytes,
+        camera_metadata_t **buffer) {
+    camera_metadata_t *new_buffer =
+            allocate_camera_metadata(entries, bytes);
+    if (new_buffer == NULL) return NO_MEMORY;
+    *buffer = new_buffer;
+    return OK;
+}
+
+int MetadataQueue::producer_cancel(camera2_frame_queue_dst_ops_t *q,
+        camera_metadata_t *old_buffer) {
+    free_camera_metadata(old_buffer);
+    return OK;
+}
+
+int MetadataQueue::producer_enqueue(camera2_frame_queue_dst_ops_t *q,
+        camera_metadata_t *filled_buffer) {
+    MetadataQueue *queue = static_cast<MetadataQueue *>(q);
+    return queue->enqueue(filled_buffer);
+}
+
+/**
+ * NotifierListener
+ */
+
+NotifierListener::NotifierListener() {
+}
+
+status_t NotifierListener::getNotificationsFrom(camera2_device *dev) {
+    if (!dev) return BAD_VALUE;
+    status_t err;
+    err = dev->ops->set_notify_callback(dev,
+            notify_callback_dispatch,
+            (void*)this);
+    return err;
+}
+
+status_t NotifierListener::getNextNotification(int32_t *msg_type,
+        int32_t *ext1,
+        int32_t *ext2,
+        int32_t *ext3) {
+    Mutex::Autolock l(mMutex);
+    if (mNotifications.size() == 0) return BAD_VALUE;
+    return getNextNotificationLocked(msg_type, ext1, ext2, ext3);
+}
+
+status_t NotifierListener::waitForNotification(int32_t *msg_type,
+        int32_t *ext1,
+        int32_t *ext2,
+        int32_t *ext3) {
+    Mutex::Autolock l(mMutex);
+    while (mNotifications.size() == 0) {
+        mNewNotification.wait(mMutex);
+    }
+    return getNextNotificationLocked(msg_type, ext1, ext2, ext3);
+}
+
+int NotifierListener::numNotifications() {
+    Mutex::Autolock l(mMutex);
+    return mNotifications.size();
+}
+
+status_t NotifierListener::getNextNotificationLocked(int32_t *msg_type,
+        int32_t *ext1,
+        int32_t *ext2,
+        int32_t *ext3) {
+    *msg_type = mNotifications.begin()->msg_type;
+    *ext1 = mNotifications.begin()->ext1;
+    *ext2 = mNotifications.begin()->ext2;
+    *ext3 = mNotifications.begin()->ext3;
+    mNotifications.erase(mNotifications.begin());
+    return OK;
+}
+
+void NotifierListener::onNotify(int32_t msg_type,
+        int32_t ext1,
+        int32_t ext2,
+        int32_t ext3) {
+    Mutex::Autolock l(mMutex);
+    mNotifications.push_back(Notification(msg_type, ext1, ext2, ext3));
+    mNewNotification.signal();
+}
+
+void NotifierListener::notify_callback_dispatch(int32_t msg_type,
+        int32_t ext1,
+        int32_t ext2,
+        int32_t ext3,
+        void *user) {
+    NotifierListener *me = reinterpret_cast<NotifierListener*>(user);
+    me->onNotify(msg_type, ext1, ext2, ext3);
+}
+
+/**
+ * StreamAdapter
+ */
+
+#ifndef container_of
+#define container_of(ptr, type, member) \
+    (type *)((char*)(ptr) - offsetof(type, member))
+#endif
+
+StreamAdapter::StreamAdapter(sp<ISurfaceTexture> consumer):
+        mState(UNINITIALIZED), mDevice(NULL),
+        mId(-1),
+        mWidth(0), mHeight(0), mFormatRequested(0)
+{
+    mConsumerInterface = new SurfaceTextureClient(consumer);
+    camera2_stream_ops::dequeue_buffer = dequeue_buffer;
+    camera2_stream_ops::enqueue_buffer = enqueue_buffer;
+    camera2_stream_ops::cancel_buffer = cancel_buffer;
+    camera2_stream_ops::set_crop = set_crop;
+}
+
+StreamAdapter::~StreamAdapter() {
+    disconnect();
+}
+
+status_t StreamAdapter::connectToDevice(camera2_device_t *d,
+        uint32_t width, uint32_t height, int format) {
+    if (mState != UNINITIALIZED) return INVALID_OPERATION;
+    if (d == NULL) {
+        ALOGE("%s: Null device passed to stream adapter", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    status_t res;
+
+    mWidth = width;
+    mHeight = height;
+    mFormatRequested = format;
+
+    // Allocate device-side stream interface
+
+    uint32_t id;
+    uint32_t formatActual;
+    uint32_t usage;
+    uint32_t maxBuffers = 2;
+    res = d->ops->allocate_stream(d,
+            mWidth, mHeight, mFormatRequested, getStreamOps(),
+            &id, &formatActual, &usage, &maxBuffers);
+    if (res != OK) {
+        ALOGE("%s: Device stream allocation failed: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        mState = UNINITIALIZED;
+        return res;
+    }
+    mDevice = d;
+
+    mId = id;
+    mFormat = formatActual;
+    mUsage = usage;
+    mMaxProducerBuffers = maxBuffers;
+
+    // Configure consumer-side ANativeWindow interface
+
+    res = native_window_api_connect(mConsumerInterface.get(),
+            NATIVE_WINDOW_API_CAMERA);
+    if (res != OK) {
+        ALOGE("%s: Unable to connect to native window for stream %d",
+                __FUNCTION__, mId);
+        mState = ALLOCATED;
+        return res;
+    }
+
+    res = native_window_set_usage(mConsumerInterface.get(), mUsage);
+    if (res != OK) {
+        ALOGE("%s: Unable to configure usage %08x for stream %d",
+                __FUNCTION__, mUsage, mId);
+        mState = CONNECTED;
+        return res;
+    }
+
+    res = native_window_set_buffers_geometry(mConsumerInterface.get(),
+            mWidth, mHeight, mFormat);
+    if (res != OK) {
+        ALOGE("%s: Unable to configure buffer geometry"
+                " %d x %d, format 0x%x for stream %d",
+                __FUNCTION__, mWidth, mHeight, mFormat, mId);
+        mState = CONNECTED;
+        return res;
+    }
+
+    int maxConsumerBuffers;
+    res = mConsumerInterface->query(mConsumerInterface.get(),
+            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
+    if (res != OK) {
+        ALOGE("%s: Unable to query consumer undequeued"
+                " buffer count for stream %d", __FUNCTION__, mId);
+        mState = CONNECTED;
+        return res;
+    }
+    mMaxConsumerBuffers = maxConsumerBuffers;
+
+    ALOGV("%s: Producer wants %d buffers, consumer wants %d", __FUNCTION__,
+            mMaxProducerBuffers, mMaxConsumerBuffers);
+
+    int totalBuffers = mMaxConsumerBuffers + mMaxProducerBuffers;
+
+    res = native_window_set_buffer_count(mConsumerInterface.get(),
+            totalBuffers);
+    if (res != OK) {
+        ALOGE("%s: Unable to set buffer count for stream %d",
+                __FUNCTION__, mId);
+        mState = CONNECTED;
+        return res;
+    }
+
+    // Register allocated buffers with HAL device
+    buffer_handle_t *buffers = new buffer_handle_t[totalBuffers];
+    ANativeWindowBuffer **anwBuffers = new ANativeWindowBuffer*[totalBuffers];
+    int bufferIdx = 0;
+    for (; bufferIdx < totalBuffers; bufferIdx++) {
+        res = mConsumerInterface->dequeueBuffer(mConsumerInterface.get(),
+                &anwBuffers[bufferIdx]);
+        if (res != OK) {
+            ALOGE("%s: Unable to dequeue buffer %d for initial registration for"
+                    "stream %d", __FUNCTION__, bufferIdx, mId);
+            mState = CONNECTED;
+            goto cleanUpBuffers;
+        }
+
+        res = mConsumerInterface->lockBuffer(mConsumerInterface.get(),
+                anwBuffers[bufferIdx]);
+        if (res != OK) {
+            ALOGE("%s: Unable to lock buffer %d for initial registration for"
+                    "stream %d", __FUNCTION__, bufferIdx, mId);
+            mState = CONNECTED;
+            bufferIdx++;
+            goto cleanUpBuffers;
+        }
+
+        buffers[bufferIdx] = anwBuffers[bufferIdx]->handle;
+    }
+
+    res = mDevice->ops->register_stream_buffers(mDevice,
+            mId,
+            totalBuffers,
+            buffers);
+    if (res != OK) {
+        ALOGE("%s: Unable to register buffers with HAL device for stream %d",
+                __FUNCTION__, mId);
+        mState = CONNECTED;
+    } else {
+        mState = ACTIVE;
+    }
+
+cleanUpBuffers:
+    for (int i = 0; i < bufferIdx; i++) {
+        res = mConsumerInterface->cancelBuffer(mConsumerInterface.get(),
+                anwBuffers[i]);
+    }
+    delete[] anwBuffers;
+    delete[] buffers;
+
+    return res;
+}
+
+status_t StreamAdapter::disconnect() {
+    status_t res;
+    if (mState >= ALLOCATED) {
+        res = mDevice->ops->release_stream(mDevice, mId);
+        if (res != OK) {
+            ALOGE("%s: Unable to release stream %d",
+                    __FUNCTION__, mId);
+            return res;
+        }
+    }
+    if (mState >= CONNECTED) {
+        res = native_window_api_disconnect(mConsumerInterface.get(),
+                NATIVE_WINDOW_API_CAMERA);
+        if (res != OK) {
+            ALOGE("%s: Unable to disconnect stream %d from native window",
+                    __FUNCTION__, mId);
+            return res;
+        }
+    }
+    mId = -1;
+    mState = DISCONNECTED;
+    return OK;
+}
+
+int StreamAdapter::getId() {
+    return mId;
+}
+
+camera2_stream_ops *StreamAdapter::getStreamOps() {
+    return static_cast<camera2_stream_ops *>(this);
+}
+
+ANativeWindow* StreamAdapter::toANW(camera2_stream_ops_t *w) {
+    return static_cast<StreamAdapter*>(w)->mConsumerInterface.get();
+}
+
+int StreamAdapter::dequeue_buffer(camera2_stream_ops_t *w,
+        buffer_handle_t** buffer) {
+    int res;
+    int state = static_cast<StreamAdapter*>(w)->mState;
+    if (state != ACTIVE) {
+        ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
+        return INVALID_OPERATION;
+    }
+
+    ANativeWindow *a = toANW(w);
+    ANativeWindowBuffer* anb;
+    res = a->dequeueBuffer(a, &anb);
+    if (res != OK) return res;
+    res = a->lockBuffer(a, anb);
+    if (res != OK) return res;
+
+    *buffer = &(anb->handle);
+
+    return res;
+}
+
+int StreamAdapter::enqueue_buffer(camera2_stream_ops_t* w,
+        int64_t timestamp,
+        buffer_handle_t* buffer) {
+    int state = static_cast<StreamAdapter*>(w)->mState;
+    if (state != ACTIVE) {
+        ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
+        return INVALID_OPERATION;
+    }
+    ANativeWindow *a = toANW(w);
+    status_t err;
+    err = native_window_set_buffers_timestamp(a, timestamp);
+    if (err != OK) return err;
+    return a->queueBuffer(a,
+            container_of(buffer, ANativeWindowBuffer, handle));
+}
+
+int StreamAdapter::cancel_buffer(camera2_stream_ops_t* w,
+        buffer_handle_t* buffer) {
+    int state = static_cast<StreamAdapter*>(w)->mState;
+    if (state != ACTIVE) {
+        ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
+        return INVALID_OPERATION;
+    }
+    ANativeWindow *a = toANW(w);
+    return a->cancelBuffer(a,
+            container_of(buffer, ANativeWindowBuffer, handle));
+}
+
+int StreamAdapter::set_crop(camera2_stream_ops_t* w,
+        int left, int top, int right, int bottom) {
+    int state = static_cast<StreamAdapter*>(w)->mState;
+    if (state != ACTIVE) {
+        ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
+        return INVALID_OPERATION;
+    }
+    ANativeWindow *a = toANW(w);
+    android_native_rect_t crop = { left, top, right, bottom };
+    return native_window_set_crop(a, &crop);
+}
+
+/**
+ * FrameWaiter
+ */
+
+FrameWaiter::FrameWaiter():
+        mPendingFrames(0) {
+}
+
+status_t FrameWaiter::waitForFrame(nsecs_t timeout) {
+    status_t res;
+    Mutex::Autolock lock(mMutex);
+    while (mPendingFrames == 0) {
+        res = mCondition.waitRelative(mMutex, timeout);
+        if (res != OK) return res;
+    }
+    mPendingFrames--;
+    return OK;
+}
+
+void FrameWaiter::onFrameAvailable() {
+    Mutex::Autolock lock(mMutex);
+    mPendingFrames++;
+    mCondition.signal();
+}
+
+} // namespace android
diff --git a/tests/camera2/camera2_utils.h b/tests/camera2/camera2_utils.h
new file mode 100644
index 0000000..4e0b521
--- /dev/null
+++ b/tests/camera2/camera2_utils.h
@@ -0,0 +1,235 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Utility classes for camera2 HAL testing
+
+#ifndef ANDROID_CAMERA2_TEST_UTILS_H
+#define ANDROID_CAMERA2_TEST_UTILS_H
+
+#include <system/camera_metadata.h>
+#include <hardware/camera2.h>
+
+#include <gui/SurfaceTextureClient.h>
+#include <gui/CpuConsumer.h>
+
+#include <utils/List.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+
+namespace android {
+
+/**
+ * Queue class for both sending requests to a camera2 device, and for receiving
+ * frames from a camera2 device.
+ */
+class MetadataQueue: public camera2_request_queue_src_ops_t,
+                    public camera2_frame_queue_dst_ops_t {
+  public:
+    MetadataQueue();
+    ~MetadataQueue();
+
+    // Interface to camera2 HAL device, either for requests (device is consumer)
+    // or for frames (device is producer)
+    camera2_request_queue_src_ops_t*   getToConsumerInterface();
+    void setFromConsumerInterface(camera2_device_t *d);
+
+    camera2_frame_queue_dst_ops_t* getToProducerInterface();
+
+    // Real interfaces. On enqueue, queue takes ownership of buffer pointer
+    // On dequeue, user takes ownership of buffer pointer.
+    status_t enqueue(camera_metadata_t *buf);
+    status_t dequeue(camera_metadata_t **buf, bool incrementCount = true);
+    int      getBufferCount();
+    status_t waitForBuffer(nsecs_t timeout);
+
+    // Set repeating buffer(s); if the queue is empty on a dequeue call, the
+    // queue copies the contents of the stream slot into the queue, and then
+    // dequeues the first new entry.
+    status_t setStreamSlot(camera_metadata_t *buf);
+    status_t setStreamSlot(const List<camera_metadata_t*> &bufs);
+
+  private:
+    status_t freeBuffers(List<camera_metadata_t*> &list,
+                         List<camera_metadata_t*>::iterator start,
+                         List<camera_metadata_t*>::iterator end);
+
+    camera2_device_t *mDevice;
+
+    Mutex mMutex;
+    Condition notEmpty;
+
+    int mFrameCount;
+
+    int mCount;
+    List<camera_metadata_t*> mEntries;
+    int mStreamSlotCount;
+    List<camera_metadata_t*> mStreamSlot;
+
+    bool mSignalConsumer;
+
+    static int consumer_buffer_count(camera2_request_queue_src_ops_t *q);
+
+    static int consumer_dequeue(camera2_request_queue_src_ops_t *q,
+            camera_metadata_t **buffer);
+
+    static int consumer_free(camera2_request_queue_src_ops_t *q,
+            camera_metadata_t *old_buffer);
+
+    static int producer_dequeue(camera2_frame_queue_dst_ops_t *q,
+            size_t entries, size_t bytes,
+            camera_metadata_t **buffer);
+
+    static int producer_cancel(camera2_frame_queue_dst_ops_t *q,
+            camera_metadata_t *old_buffer);
+
+    static int producer_enqueue(camera2_frame_queue_dst_ops_t *q,
+            camera_metadata_t *filled_buffer);
+
+};
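+
+/*
+ * Typical use as the request queue (illustrative sketch only; 'dev' is an
+ * open camera2_device_t and 'request' an allocated camera_metadata_t):
+ *
+ *   MetadataQueue requests;
+ *   dev->ops->set_request_queue_src_ops(dev,
+ *           requests.getToConsumerInterface());
+ *   requests.setFromConsumerInterface(dev);
+ *   requests.enqueue(request);      // queue takes ownership of 'request'
+ *   // or requests.setStreamSlot(request) for a repeating request
+ */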
+
+/**
+ * Basic class to receive and queue up notifications from the camera device
+ */
+
+class NotifierListener {
+  public:
+
+    NotifierListener();
+
+    status_t getNotificationsFrom(camera2_device *dev);
+
+    status_t getNextNotification(int32_t *msg_type, int32_t *ext1,
+            int32_t *ext2, int32_t *ext3);
+
+    status_t waitForNotification(int32_t *msg_type, int32_t *ext1,
+            int32_t *ext2, int32_t *ext3);
+
+    int numNotifications();
+
+  private:
+
+    status_t getNextNotificationLocked(int32_t *msg_type,
+            int32_t *ext1, int32_t *ext2, int32_t *ext3);
+
+    struct Notification {
+        Notification(int32_t type, int32_t e1, int32_t e2, int32_t e3):
+                msg_type(type),
+                ext1(e1),
+                ext2(e2),
+                ext3(e3)
+        {}
+
+        int32_t msg_type;
+        int32_t ext1;
+        int32_t ext2;
+        int32_t ext3;
+    };
+
+    List<Notification> mNotifications;
+
+    Mutex mMutex;
+    Condition mNewNotification;
+
+    void onNotify(int32_t msg_type,
+            int32_t ext1,
+            int32_t ext2,
+            int32_t ext3);
+
+    static void notify_callback_dispatch(int32_t msg_type,
+            int32_t ext1,
+            int32_t ext2,
+            int32_t ext3,
+            void *user);
+
+};
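+
+/*
+ * Typical use (illustrative sketch only; 'dev' is an open camera2_device_t):
+ *
+ *   NotifierListener listener;
+ *   listener.getNotificationsFrom(dev);
+ *   int32_t msgType, ext1, ext2, ext3;
+ *   listener.waitForNotification(&msgType, &ext1, &ext2, &ext3);
+ */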
+
+/**
+ * Adapter from an ISurfaceTexture interface to camera2 device stream ops.
+ * Also takes care of allocating/deallocating stream in device interface
+ */
+class StreamAdapter: public camera2_stream_ops {
+  public:
+    StreamAdapter(sp<ISurfaceTexture> consumer);
+
+    ~StreamAdapter();
+
+    status_t connectToDevice(camera2_device_t *d,
+            uint32_t width, uint32_t height, int format);
+
+    status_t disconnect();
+
+    // Get stream ID. Only valid after a successful connectToDevice call.
+    int      getId();
+
+  private:
+    enum {
+        ERROR = -1,
+        DISCONNECTED = 0,
+        UNINITIALIZED,
+        ALLOCATED,
+        CONNECTED,
+        ACTIVE
+    } mState;
+
+    sp<ANativeWindow> mConsumerInterface;
+    camera2_device_t *mDevice;
+
+    uint32_t mId;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    uint32_t mFormat;
+    uint32_t mUsage;
+    uint32_t mMaxProducerBuffers;
+    uint32_t mMaxConsumerBuffers;
+
+    int mFormatRequested;
+
+    camera2_stream_ops *getStreamOps();
+
+    static ANativeWindow* toANW(camera2_stream_ops_t *w);
+
+    static int dequeue_buffer(camera2_stream_ops_t *w,
+            buffer_handle_t** buffer);
+
+    static int enqueue_buffer(camera2_stream_ops_t* w,
+            int64_t timestamp,
+            buffer_handle_t* buffer);
+
+    static int cancel_buffer(camera2_stream_ops_t* w,
+            buffer_handle_t* buffer);
+
+    static int set_crop(camera2_stream_ops_t* w,
+            int left, int top, int right, int bottom);
+
+};
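+
+/*
+ * Typical use, routing a raw stream into a CpuConsumer (illustrative sketch
+ * only; 'dev', 'width', and 'height' are assumed to exist):
+ *
+ *   sp<CpuConsumer> consumer = new CpuConsumer(1);
+ *   StreamAdapter stream(consumer->getProducerInterface());
+ *   stream.connectToDevice(dev, width, height, HAL_PIXEL_FORMAT_RAW_SENSOR);
+ *   int streamId = stream.getId();  // for ANDROID_REQUEST_OUTPUT_STREAMS
+ *   ...
+ *   stream.disconnect();
+ */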
+
+/**
+ * Simple class to wait on the CpuConsumer to have a frame available
+ */
+class FrameWaiter : public CpuConsumer::FrameAvailableListener {
+  public:
+    FrameWaiter();
+
+    /**
+     * Wait at most 'timeout' nanoseconds for a new frame. Returns
+     * OK if a frame is available, TIMED_OUT if the timeout was reached.
+     */
+    status_t waitForFrame(nsecs_t timeout);
+
+    virtual void onFrameAvailable();
+
+    int mPendingFrames;
+    Mutex mMutex;
+    Condition mCondition;
+};
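+
+/*
+ * Typical use with a CpuConsumer (illustrative sketch only; 'consumer' is
+ * set up as in the StreamAdapter example above):
+ *
+ *   sp<FrameWaiter> waiter = new FrameWaiter();
+ *   consumer->setFrameAvailableListener(waiter);
+ *   if (waiter->waitForFrame(1000000000LL) == OK) {  // wait up to 1 s
+ *       CpuConsumer::LockedBuffer img;
+ *       consumer->lockNextBuffer(&img);
+ *       // ... read img.data ...
+ *       consumer->unlockBuffer(img);
+ *   }
+ */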
+
+} // namespace android
+
+#endif // ANDROID_CAMERA2_TEST_UTILS_H