Merge changes from topic "evs_next"
* changes:
Implement getPhysicalCameraInfo() method
Update EVS VTS test cases
Fix EVS frame handler for VTS
Fix EVS VTS test cases
Prerequisite changes for EVS multi-camera support
diff --git a/automotive/evs/1.1/IEvsCamera.hal b/automotive/evs/1.1/IEvsCamera.hal
index 975b6c6..fc68e60 100644
--- a/automotive/evs/1.1/IEvsCamera.hal
+++ b/automotive/evs/1.1/IEvsCamera.hal
@@ -34,6 +34,23 @@
getCameraInfo_1_1() generates (CameraDesc info);
/**
+ * Returns the description of the physical camera device that backs this
+ * logical camera.
+ *
+ * If the requested device does not exist or does not back this logical
+ * device, this method returns a null camera descriptor. If this method is
+ * called on a physical camera device and the given device ID matches, it
+ * behaves the same as getCameraInfo_1_1(); otherwise, it returns a null
+ * camera descriptor.
+ *
+ * @param deviceId Physical camera device identifier string.
+ * @return info The description of a member physical camera device.
+ * This must be the same value as reported by
+ * EvsEnumerator::getCameraList_1_1().
+ */
+ getPhysicalCameraInfo(string deviceId) generates (CameraDesc info);
+
+ /**
* Requests to pause EVS camera stream events.
*
* Like stopVideoStream(), events may continue to arrive for some time
@@ -51,7 +68,7 @@
resumeVideoStream() generates (EvsResult result);
/**
- * Returns a frame that was delivered by to the IEvsCameraStream.
+ * Returns frames that were delivered to the IEvsCameraStream.
*
* When done consuming a frame delivered to the IEvsCameraStream
* interface, it must be returned to the IEvsCamera for reuse.
@@ -59,10 +76,10 @@
* as one), and if the supply is exhausted, no further frames may be
* delivered until a buffer is returned.
*
- * @param buffer A buffer to be returned.
+ * @param buffer Buffers to be returned.
* @return result Return EvsResult::OK if this call is successful.
*/
- doneWithFrame_1_1(BufferDesc buffer) generates (EvsResult result);
+ doneWithFrame_1_1(vec<BufferDesc> buffer) generates (EvsResult result);
/**
* Requests to be a master client.
@@ -127,8 +144,13 @@
generates (int32_t min, int32_t max, int32_t step);
/**
- * Requests to set a camera parameter. Only a request from the master
- * client will be processed successfully.
+ * Requests to set a camera parameter.
+ *
+ * Only a request from the master client will be processed successfully.
+ * When this method is called on a logical camera device, the request is
+ * forwarded to each backing physical device. If programming any physical
+ * device fails, an error code is returned together with one effective value
+ * per backing camera device.
*
* @param id The identifier of camera parameter, CameraParam enum.
* value A desired parameter value.
@@ -138,21 +160,22 @@
* parameter is not supported.
* EvsResult::UNDERLYING_SERVICE_ERROR if it fails to
* program a value by any other reason.
- * effectiveValue A programmed parameter value. This may differ
+ * effectiveValue Programmed parameter values. These may differ
* from what the client gives if, for example, the
* driver does not support a target parameter.
*/
setIntParameter(CameraParam id, int32_t value)
- generates (EvsResult result, int32_t effectiveValue);
+ generates (EvsResult result, vec<int32_t> effectiveValue);
/**
- * Retrieves a value of given camera parameter.
+ * Retrieves values of a given camera parameter.
*
* @param id The identifier of camera parameter, CameraParam enum.
* @return result EvsResult::OK if it succeeds to read a parameter.
* EvsResult::INVALID_ARG if either a requested parameter is
* not supported.
- * value A value of requested camera parameter.
+ * value Values of the requested camera parameter; one value per
+ * backing camera device.
*/
- getIntParameter(CameraParam id) generates(EvsResult result, int32_t value);
+ getIntParameter(CameraParam id) generates(EvsResult result, vec<int32_t> value);
};
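
Note: below is a minimal, hypothetical client-side sketch of the revised IEvsCamera calls (not part of this change); pCam, deviceId, and value are assumed placeholders, and the callback shapes follow the VTS usage later in this patch.

    // Query the descriptor of one physical device backing a logical camera.
    pCam->getPhysicalCameraInfo(deviceId,
        [](const CameraDesc& info) {
            ALOGI("Physical device: %s", info.v1.cameraId.c_str());
        });

    // Program a parameter; a logical camera reports one effective value per
    // backing physical device.
    pCam->setIntParameter(CameraParam::BRIGHTNESS, value,
        [](EvsResult result, const hidl_vec<int32_t>& effectiveValues) {
            if (result == EvsResult::OK) {
                for (auto&& v : effectiveValues) {
                    ALOGI("Effective value: %d", v);
                }
            }
        });
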
diff --git a/automotive/evs/1.1/IEvsCameraStream.hal b/automotive/evs/1.1/IEvsCameraStream.hal
index 9e4ea19..aa35c62 100644
--- a/automotive/evs/1.1/IEvsCameraStream.hal
+++ b/automotive/evs/1.1/IEvsCameraStream.hal
@@ -18,7 +18,7 @@
import @1.0::IEvsCameraStream;
import @1.1::BufferDesc;
-import @1.1::EvsEvent;
+import @1.1::EvsEventDesc;
/**
* Implemented on client side to receive asynchronous streaming event deliveries.
@@ -26,7 +26,7 @@
interface IEvsCameraStream extends @1.0::IEvsCameraStream {
/**
- * Receives calls from the HAL each time a video frame is ready for inspection.
+ * Receives calls from the HAL each time video frames are ready for inspection.
* Buffer handles received by this method must be returned via calls to
* IEvsCamera::doneWithFrame_1_1(). When the video stream is stopped via a call
* to IEvsCamera::stopVideoStream(), this callback may continue to happen for
@@ -35,14 +35,19 @@
* event must be delivered. No further frame deliveries may happen
* thereafter.
*
- * @param buffer a buffer descriptor of a delivered image frame.
+ * A camera device will deliver the same number of frames as the number of
+ * backing physical camera devices; that is, a physical camera device always
+ * sends a single frame, while a logical camera device sends as many frames
+ * as it has backing physical camera devices.
+ *
+ * @param buffer Buffer descriptors of delivered image frames.
*/
- oneway deliverFrame_1_1(BufferDesc buffer);
+ oneway deliverFrame_1_1(vec<BufferDesc> buffer);
/**
* Receives calls from the HAL each time an event happens.
*
* @param event EVS event with possible event information.
*/
- oneway notify(EvsEvent event);
+ oneway notify(EvsEventDesc event);
};
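
Note: below is a minimal sketch of a stream-callback implementation under the revised interface (illustrative only; MyStream and mCamera are assumed names, and the handling mirrors the FrameHandler changes later in this patch).

    Return<void> MyStream::deliverFrame_1_1(const hidl_vec<BufferDesc>& buffers) {
        // A logical camera delivers one BufferDesc per backing physical device.
        for (const auto& buffer : buffers) {
            ALOGD("Frame %u from %s", buffer.bufferId, buffer.deviceId.c_str());
        }
        // Buffers from a single delivery are returned together.
        mCamera->doneWithFrame_1_1(buffers);
        return Void();
    }
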
diff --git a/automotive/evs/1.1/default/Android.bp b/automotive/evs/1.1/default/Android.bp
index 41cb426..88fd657 100644
--- a/automotive/evs/1.1/default/Android.bp
+++ b/automotive/evs/1.1/default/Android.bp
@@ -16,7 +16,7 @@
shared_libs: [
"android.hardware.automotive.evs@1.0",
"android.hardware.automotive.evs@1.1",
- "android.hardware.camera.device@3.2",
+ "android.hardware.camera.device@3.3",
"libbase",
"libbinder",
"liblog",
diff --git a/automotive/evs/1.1/default/ConfigManager.cpp b/automotive/evs/1.1/default/ConfigManager.cpp
index 96a2f98..986793e 100644
--- a/automotive/evs/1.1/default/ConfigManager.cpp
+++ b/automotive/evs/1.1/default/ConfigManager.cpp
@@ -42,55 +42,32 @@
while (curElem != nullptr) {
if (!strcmp(curElem->Name(), "group")) {
/* camera group identifier */
- const char *group_id = curElem->FindAttribute("group_id")->Value();
+ const char *id = curElem->FindAttribute("id")->Value();
- /* create CameraGroup */
- unique_ptr<ConfigManager::CameraGroup> aCameraGroup(new ConfigManager::CameraGroup());
+ /* create a camera group to be filled */
+ CameraGroupInfo *aCamera = new CameraGroupInfo();
- /* add a camera device to its group */
- addCameraDevices(curElem->FindAttribute("device_id")->Value(), aCameraGroup);
-
- /* a list of camera stream configurations */
- const XMLElement *childElem =
- curElem->FirstChildElement("caps")->FirstChildElement("stream");
- while (childElem != nullptr) {
- /* read 5 attributes */
- const XMLAttribute *idAttr = childElem->FindAttribute("id");
- const XMLAttribute *widthAttr = childElem->FindAttribute("width");
- const XMLAttribute *heightAttr = childElem->FindAttribute("height");
- const XMLAttribute *fmtAttr = childElem->FindAttribute("format");
- const XMLAttribute *fpsAttr = childElem->FindAttribute("framerate");
-
- const int32_t id = stoi(idAttr->Value());
- int32_t framerate = 0;
- if (fpsAttr != nullptr) {
- framerate = stoi(fpsAttr->Value());
- }
-
- int32_t pixFormat;
- if (ConfigManagerUtil::convertToPixelFormat(fmtAttr->Value(),
- pixFormat)) {
- RawStreamConfiguration cfg = {
- id,
- stoi(widthAttr->Value()),
- stoi(heightAttr->Value()),
- pixFormat,
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
- framerate
- };
- aCameraGroup->streamConfigurations[id] = cfg;
- }
-
- childElem = childElem->NextSiblingElement("stream");
+ /* read camera device information */
+ if (!readCameraDeviceInfo(aCamera, curElem)) {
+ ALOGW("Failed to read a camera information of %s", id);
+ delete aCamera;
+ continue;
}
/* camera group synchronization */
const char *sync = curElem->FindAttribute("synchronized")->Value();
- aCameraGroup->synchronized =
- static_cast<bool>(strcmp(sync, "false"));
+ if (!strcmp(sync, "CALIBRATED")) {
+ aCamera->synchronized =
+ ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED;
+ } else if (!strcmp(sync, "APPROXIMATE")) {
+ aCamera->synchronized =
+ ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE;
+ } else {
+ aCamera->synchronized = 0; // Not synchronized
+ }
/* add a group to hash map */
- mCameraGroups[group_id] = std::move(aCameraGroup);
+ mCameraGroupInfos.insert_or_assign(id, unique_ptr<CameraGroupInfo>(aCamera));
} else if (!strcmp(curElem->Name(), "device")) {
/* camera unique identifier */
const char *id = curElem->FindAttribute("id")->Value();
@@ -98,8 +75,18 @@
/* camera mount location */
const char *pos = curElem->FindAttribute("position")->Value();
+ /* create a camera device to be filled */
+ CameraInfo *aCamera = new CameraInfo();
+
+ /* read camera device information */
+ if (!readCameraDeviceInfo(aCamera, curElem)) {
+ ALOGW("Failed to read a camera information of %s", id);
+ delete aCamera;
+ continue;
+ }
+
/* store read camera module information */
- mCameraInfo[id] = readCameraDeviceInfo(curElem);
+ mCameraInfo.insert_or_assign(id, unique_ptr<CameraInfo>(aCamera));
/* assign a camera device to a position group */
mCameraPosition[pos].emplace(id);
@@ -113,15 +100,13 @@
}
-unique_ptr<ConfigManager::CameraInfo>
-ConfigManager::readCameraDeviceInfo(const XMLElement *aDeviceElem) {
- if (aDeviceElem == nullptr) {
- return nullptr;
+bool
+ConfigManager::readCameraDeviceInfo(CameraInfo *aCamera,
+ const XMLElement *aDeviceElem) {
+ if (aCamera == nullptr || aDeviceElem == nullptr) {
+ return false;
}
- /* create a CameraInfo to be filled */
- unique_ptr<ConfigManager::CameraInfo> aCamera(new ConfigManager::CameraInfo());
-
/* size information to allocate camera_metadata_t */
size_t totalEntries = 0;
size_t totalDataSize = 0;
@@ -145,14 +130,15 @@
"allocated memory was not large enough");
}
- return aCamera;
+ return true;
}
-size_t ConfigManager::readCameraCapabilities(const XMLElement * const aCapElem,
- unique_ptr<ConfigManager::CameraInfo> &aCamera,
- size_t &dataSize) {
- if (aCapElem == nullptr) {
+size_t
+ConfigManager::readCameraCapabilities(const XMLElement * const aCapElem,
+ CameraInfo *aCamera,
+ size_t &dataSize) {
+ if (aCapElem == nullptr || aCamera == nullptr) {
return 0;
}
@@ -214,7 +200,7 @@
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
framerate
};
- aCamera->streamConfigurations[id] = cfg;
+ aCamera->streamConfigurations.insert_or_assign(id, cfg);
}
curElem = curElem->NextSiblingElement("stream");
@@ -232,10 +218,11 @@
}
-size_t ConfigManager::readCameraMetadata(const XMLElement * const aParamElem,
- unique_ptr<ConfigManager::CameraInfo> &aCamera,
- size_t &dataSize) {
- if (aParamElem == nullptr) {
+size_t
+ConfigManager::readCameraMetadata(const XMLElement * const aParamElem,
+ CameraInfo *aCamera,
+ size_t &dataSize) {
+ if (aParamElem == nullptr || aCamera == nullptr) {
return 0;
}
@@ -258,8 +245,9 @@
count
);
- aCamera->cameraMetadata[tag] =
- make_pair(make_unique<void *>(data), count);
+ aCamera->cameraMetadata.insert_or_assign(
+ tag, make_pair(make_unique<void *>(data), count)
+ );
++numEntries;
dataSize += calculate_camera_metadata_entry_data_size(
@@ -269,6 +257,52 @@
break;
}
+ case ANDROID_REQUEST_AVAILABLE_CAPABILITIES: {
+ camera_metadata_enum_android_request_available_capabilities_t *data =
+ new camera_metadata_enum_android_request_available_capabilities_t[1];
+ if (ConfigManagerUtil::convertToCameraCapability(
+ curElem->FindAttribute("value")->Value(), *data)) {
+ curElem->FindAttribute("value")->Value(),
+ aCamera->cameraMetadata.insert_or_assign(
+ tag, make_pair(make_unique<void *>(data), 1)
+ );
+
+ ++numEntries;
+ dataSize += calculate_camera_metadata_entry_data_size(
+ get_camera_metadata_tag_type(tag), 1
+ );
+ }
+ break;
+ }
+
+ case ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS: {
+ /* a comma-separated list of physical camera devices */
+ size_t len = strlen(curElem->FindAttribute("value")->Value());
+ char *data = new char[len + 1];
+ memcpy(data,
+ curElem->FindAttribute("value")->Value(),
+ (len + 1) * sizeof(char)); // include the terminating null
+
+ /* replace commas with null char */
+ char *p = data;
+ while (*p != '\0') {
+ if (*p == ',') {
+ *p = '\0';
+ }
+ ++p;
+ }
+
+ aCamera->cameraMetadata.insert_or_assign(
+ tag, make_pair(make_unique<void *>(data), len)
+ );
+
+ ++numEntries;
+ dataSize += calculate_camera_metadata_entry_data_size(
+ get_camera_metadata_tag_type(tag), len
+ );
+ break;
+ }
+
default:
ALOGW("Parameter %s is not supported",
curElem->FindAttribute("name")->Value());
@@ -283,10 +317,11 @@
}
-bool ConfigManager::constructCameraMetadata(unique_ptr<CameraInfo> &aCamera,
- const size_t totalEntries,
- const size_t totalDataSize) {
- if (!aCamera->allocate(totalEntries, totalDataSize)) {
+bool
+ConfigManager::constructCameraMetadata(CameraInfo *aCamera,
+ const size_t totalEntries,
+ const size_t totalDataSize) {
+ if (aCamera == nullptr || !aCamera->allocate(totalEntries, totalDataSize)) {
ALOGE("Failed to allocate memory for camera metadata");
return false;
}
@@ -401,14 +436,14 @@
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT,
0 // unused
};
- dpy->streamConfigurations[id] = cfg;
+ dpy->streamConfigurations.insert_or_assign(id, cfg);
}
curStream = curStream->NextSiblingElement("stream");
}
}
- mDisplayInfo[id] = std::move(dpy);
+ mDisplayInfo.insert_or_assign(id, std::move(dpy));
curDev = curDev->NextSiblingElement("device");
}
@@ -457,16 +492,6 @@
}
-void ConfigManager::addCameraDevices(const char *devices,
- unique_ptr<CameraGroup> &aGroup) {
- stringstream device_list(devices);
- string token;
- while (getline(device_list, token, ',')) {
- aGroup->devices.emplace(token);
- }
-}
-
-
std::unique_ptr<ConfigManager> ConfigManager::Create(const char *path) {
unique_ptr<ConfigManager> cfgMgr(new ConfigManager(path));
diff --git a/automotive/evs/1.1/default/ConfigManager.h b/automotive/evs/1.1/default/ConfigManager.h
index 0275f90..870af1c 100644
--- a/automotive/evs/1.1/default/ConfigManager.h
+++ b/automotive/evs/1.1/default/ConfigManager.h
@@ -82,9 +82,6 @@
unordered_map<CameraParam,
tuple<int32_t, int32_t, int32_t>> controls;
- /* List of supported frame rates */
- unordered_set<int32_t> frameRates;
-
/*
* List of supported output stream configurations; each array stores
* format, width, height, and direction values in the order.
@@ -102,21 +99,15 @@
camera_metadata_t *characteristics;
};
- class CameraGroup {
+ class CameraGroupInfo : public CameraInfo {
public:
- CameraGroup() {}
+ CameraGroupInfo() {}
/* ID of member camera devices */
unordered_set<string> devices;
/* The capture operation of member camera devices are synchronized */
bool synchronized = false;
-
- /*
- * List of stream configurations that are supposed by all camera devices
- * in this group.
- */
- unordered_map<int32_t, RawStreamConfiguration> streamConfigurations;
};
class SystemInfo {
@@ -165,11 +156,11 @@
/*
* Return a list of cameras
*
- * @return CameraGroup
+ * @return CameraGroupInfo
* A pointer to a camera group identified by a given id.
*/
- unique_ptr<CameraGroup>& getCameraGroup(const string& gid) {
- return mCameraGroups[gid];
+ unique_ptr<CameraGroupInfo>& getCameraGroupInfo(const string& gid) {
+ return mCameraGroupInfos[gid];
}
@@ -203,8 +194,8 @@
/* Internal data structure for camera device information */
unordered_map<string, unique_ptr<DisplayInfo>> mDisplayInfo;
- /* Camera groups are stored in <groud id, CameraGroup> hash map */
- unordered_map<string, unique_ptr<CameraGroup>> mCameraGroups;
+ /* Camera groups are stored in <group id, CameraGroupInfo> hash map */
+ unordered_map<string, unique_ptr<CameraGroupInfo>> mCameraGroupInfos;
/*
* Camera positions are stored in <position, camera id set> hash map.
@@ -253,16 +244,19 @@
/*
* read camera device information
*
- * @param aDeviceElem
+ * @param aCamera
+ * A pointer to CameraInfo that will be completed by this
+ * method.
+ * aDeviceElem
* A pointer to "device" XML element that contains camera module
* capability info and its characteristics.
*
- * @return unique_ptr<CameraInfo>
- * A pointer to CameraInfo class that contains camera module
- * capability and characteristics. Please note that this transfers
- * the ownership of created CameraInfo to the caller.
+ * @return bool
+ * Return false upon any failure in reading and processing camera
+ * device information.
*/
- unique_ptr<CameraInfo> readCameraDeviceInfo(const XMLElement *aDeviceElem);
+ bool readCameraDeviceInfo(CameraInfo *aCamera,
+ const XMLElement *aDeviceElem);
/*
* read camera metadata
@@ -280,7 +274,7 @@
* Number of camera metadata entries
*/
size_t readCameraCapabilities(const XMLElement * const aCapElem,
- unique_ptr<CameraInfo> &aCamera,
+ CameraInfo *aCamera,
size_t &dataSize);
/*
@@ -298,7 +292,7 @@
* Number of camera metadata entries
*/
size_t readCameraMetadata(const XMLElement * const aParamElem,
- unique_ptr<CameraInfo> &aCamera,
+ CameraInfo *aCamera,
size_t &dataSize);
/*
@@ -316,21 +310,9 @@
* or its size is not large enough to add all found camera metadata
* entries.
*/
- bool constructCameraMetadata(unique_ptr<CameraInfo> &aCamera,
+ bool constructCameraMetadata(CameraInfo *aCamera,
const size_t totalEntries,
const size_t totalDataSize);
-
- /*
- * parse a comma-separated list of camera devices and add them to
- * CameraGroup.
- *
- * @param devices
- * A comma-separated list of camera device identifiers.
- * @param aGroup
- * Camera group which cameras will be added to.
- */
- void addCameraDevices(const char *devices,
- unique_ptr<CameraGroup> &aGroup);
};
#endif // CONFIG_MANAGER_H
diff --git a/automotive/evs/1.1/default/ConfigManagerUtil.cpp b/automotive/evs/1.1/default/ConfigManagerUtil.cpp
index 8206daa..d10f236 100644
--- a/automotive/evs/1.1/default/ConfigManagerUtil.cpp
+++ b/automotive/evs/1.1/default/ConfigManagerUtil.cpp
@@ -90,6 +90,30 @@
aTag = ANDROID_LENS_POSE_ROTATION;
} else if (!strcmp(name, "LENS_POSE_TRANSLATION")) {
aTag = ANDROID_LENS_POSE_TRANSLATION;
+ } else if (!strcmp(name, "REQUEST_AVAILABLE_CAPABILITIES")) {
+ aTag = ANDROID_REQUEST_AVAILABLE_CAPABILITIES;
+ } else if (!strcmp(name, "LOGICAL_MULTI_CAMERA_PHYSICAL_IDS")) {
+ aTag = ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS;
+ } else {
+ return false;
+ }
+
+ return true;
+}
+
+
+bool ConfigManagerUtil::convertToCameraCapability(
+ const char *name,
+ camera_metadata_enum_android_request_available_capabilities_t &cap) {
+
+ if (!strcmp(name, "DEPTH_OUTPUT")) {
+ cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT;
+ } else if (!strcmp(name, "LOGICAL_MULTI_CAMERA")) {
+ cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA;
+ } else if (!strcmp(name, "MONOCHROME")) {
+ cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME;
+ } else if (!strcmp(name, "SECURE_IMAGE_DATA")) {
+ cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA;
} else {
return false;
}
diff --git a/automotive/evs/1.1/default/ConfigManagerUtil.h b/automotive/evs/1.1/default/ConfigManagerUtil.h
index 8c89ae7..1710cac 100644
--- a/automotive/evs/1.1/default/ConfigManagerUtil.h
+++ b/automotive/evs/1.1/default/ConfigManagerUtil.h
@@ -55,6 +55,14 @@
*/
static string trimString(const string &src,
const string &ws = " \n\r\t\f\v");
+
+ /**
+ * Convert a given string to corresponding camera capabilities
+ */
+ static bool convertToCameraCapability(
+ const char *name,
+ camera_metadata_enum_android_request_available_capabilities_t &cap);
+
};
#endif // CONFIG_MANAGER_UTIL_H
diff --git a/automotive/evs/1.1/default/EvsCamera.cpp b/automotive/evs/1.1/default/EvsCamera.cpp
index 5ba753d..b7e4efa 100644
--- a/automotive/evs/1.1/default/EvsCamera.cpp
+++ b/automotive/evs/1.1/default/EvsCamera.cpp
@@ -21,7 +21,7 @@
#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>
-
+#include <utils/SystemClock.h>
namespace android {
namespace hardware {
@@ -240,9 +240,23 @@
}
-Return<EvsResult> EvsCamera::doneWithFrame_1_1(const BufferDesc_1_1& bufDesc) {
+Return<void> EvsCamera::getPhysicalCameraInfo(const hidl_string& id,
+ getCameraInfo_1_1_cb _hidl_cb) {
+ ALOGD("%s", __FUNCTION__);
+
+ // This works exactly the same as getCameraInfo_1_1() in the default implementation.
+ (void)id;
+ _hidl_cb(mDescription);
+ return Void();
+}
+
+
+Return<EvsResult> EvsCamera::doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers) {
std::lock_guard <std::mutex> lock(mAccessLock);
- returnBuffer(bufDesc.bufferId, bufDesc.buffer.nativeHandle);
+
+ for (auto&& buffer : buffers) {
+ returnBuffer(buffer.bufferId, buffer.buffer.nativeHandle);
+ }
return EvsResult::OK;
}
@@ -490,12 +504,17 @@
newBuffer.buffer.nativeHandle = mBuffers[idx].handle;
newBuffer.pixelSize = sizeof(uint32_t);
newBuffer.bufferId = idx;
+ newBuffer.deviceId = mDescription.v1.cameraId;
+ newBuffer.timestamp = elapsedRealtimeNano();
// Write test data into the image buffer
fillTestFrame(newBuffer);
// Issue the (asynchronous) callback to the client -- can't be holding the lock
- auto result = mStream->deliverFrame_1_1(newBuffer);
+ hidl_vec<BufferDesc_1_1> frames;
+ frames.resize(1);
+ frames[0] = newBuffer;
+ auto result = mStream->deliverFrame_1_1(frames);
if (result.isOk()) {
ALOGD("Delivered %p as id %d",
newBuffer.buffer.nativeHandle.getNativeHandle(), newBuffer.bufferId);
@@ -527,7 +546,7 @@
}
// If we've been asked to stop, send an event to signal the actual end of stream
- EvsEvent event;
+ EvsEventDesc event;
event.aType = EvsEventType::STREAM_STOPPED;
auto result = mStream->notify(event);
if (!result.isOk()) {
diff --git a/automotive/evs/1.1/default/EvsCamera.h b/automotive/evs/1.1/default/EvsCamera.h
index c15b4b1..72a1b57 100644
--- a/automotive/evs/1.1/default/EvsCamera.h
+++ b/automotive/evs/1.1/default/EvsCamera.h
@@ -62,9 +62,11 @@
// Methods from ::android::hardware::automotive::evs::V1_1::IEvsCamera follow.
Return<void> getCameraInfo_1_1(getCameraInfo_1_1_cb _hidl_cb) override;
+ Return<void> getPhysicalCameraInfo(const hidl_string& id,
+ getPhysicalCameraInfo_cb _hidl_cb) override;
Return<EvsResult> pauseVideoStream() override;
Return<EvsResult> resumeVideoStream() override;
- Return<EvsResult> doneWithFrame_1_1(const BufferDesc_1_1& buffer) override;
+ Return<EvsResult> doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffer) override;
Return<EvsResult> setMaster() override;
Return<EvsResult> forceMaster(const sp<IEvsDisplay>& display) override;
Return<EvsResult> unsetMaster() override;
diff --git a/automotive/evs/1.1/default/resources/evs_default_configuration.xml b/automotive/evs/1.1/default/resources/evs_default_configuration.xml
index 692102e..a79e7c2 100644
--- a/automotive/evs/1.1/default/resources/evs_default_configuration.xml
+++ b/automotive/evs/1.1/default/resources/evs_default_configuration.xml
@@ -28,8 +28,31 @@
<num_cameras value='1'/>
</system>
- <!-- camera device information -->
+ <!-- camera information -->
<camera>
+ <!-- camera group starts -->
+ <group id='group1' synchronized='APPROXIMATE'>
+ <caps>
+ <stream id='0' width='640' height='360' format='RGBA_8888' framerate='30'/>
+ </caps>
+
+ <!-- list of parameters -->
+ <characteristics>
+ <parameter
+ name='REQUEST_AVAILABLE_CAPABILITIES'
+ type='enum'
+ size='1'
+ value='LOGICAL_MULTI_CAMERA'
+ />
+ <parameter
+ name='LOGICAL_MULTI_CAMERA_PHYSICAL_IDS'
+ type='byte[]'
+ size='1'
+ value='/dev/video1'
+ />
+ </characteristics>
+ </group>
+
<!-- camera device starts -->
<device id='/dev/video1' position='rear'>
<caps>
diff --git a/automotive/evs/1.1/types.hal b/automotive/evs/1.1/types.hal
index dcb2abb..f88d223 100644
--- a/automotive/evs/1.1/types.hal
+++ b/automotive/evs/1.1/types.hal
@@ -61,6 +61,14 @@
* Opaque value from driver
*/
uint32_t bufferId;
+ /**
+ * Unique identifier of the physical camera device that produces this buffer.
+ */
+ string deviceId;
+ /**
+ * Time at which this buffer was filled.
+ */
+ int64_t timestamp;
};
/**
@@ -97,12 +105,16 @@
/**
* Structure that describes informative events occurred during EVS is streaming
*/
-struct EvsEvent {
+struct EvsEventDesc {
/**
* Type of an informative event
*/
EvsEventType aType;
/**
+ * Device identifier
+ */
+ string deviceId;
+ /**
* Possible additional information
*/
uint32_t[4] payload;
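
Note: below is a minimal sketch of an event handler that reads the new deviceId field (illustrative only; MyStream is an assumed name). As the VTS changes later in this patch show, payload[0] carries the CameraParam identifier and payload[1] the programmed value for PARAMETER_CHANGED events.

    Return<void> MyStream::notify(const EvsEventDesc& event) {
        if (event.aType == EvsEventType::PARAMETER_CHANGED) {
            ALOGD("Parameter %u changed to %u on %s",
                  event.payload[0], event.payload[1], event.deviceId.c_str());
        }
        return Void();
    }
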
diff --git a/automotive/evs/1.1/vts/functional/FrameHandler.cpp b/automotive/evs/1.1/vts/functional/FrameHandler.cpp
index 6d53652..ebf488a 100644
--- a/automotive/evs/1.1/vts/functional/FrameHandler.cpp
+++ b/automotive/evs/1.1/vts/functional/FrameHandler.cpp
@@ -80,7 +80,7 @@
asyncStopStream();
// Wait until the stream has actually stopped
- std::unique_lock<std::mutex> lock(mLock);
+ std::unique_lock<std::mutex> lock(mEventLock);
if (mRunning) {
mEventSignal.wait(lock, [this]() { return !mRunning; });
}
@@ -88,7 +88,7 @@
bool FrameHandler::returnHeldBuffer() {
- std::unique_lock<std::mutex> lock(mLock);
+ std::lock_guard<std::mutex> lock(mLock);
// Return the oldest buffer we're holding
if (mHeldBuffers.empty()) {
@@ -96,16 +96,16 @@
return false;
}
- BufferDesc_1_1 buffer = mHeldBuffers.front();
+ hidl_vec<BufferDesc_1_1> buffers = mHeldBuffers.front();
mHeldBuffers.pop();
- mCamera->doneWithFrame_1_1(buffer);
+ mCamera->doneWithFrame_1_1(buffers);
return true;
}
bool FrameHandler::isRunning() {
- std::unique_lock<std::mutex> lock(mLock);
+ std::lock_guard<std::mutex> lock(mLock);
return mRunning;
}
@@ -120,7 +120,7 @@
void FrameHandler::getFramesCounters(unsigned* received, unsigned* displayed) {
- std::unique_lock<std::mutex> lock(mLock);
+ std::lock_guard<std::mutex> lock(mLock);
if (received) {
*received = mFramesReceived;
@@ -138,11 +138,17 @@
}
-Return<void> FrameHandler::deliverFrame_1_1(const BufferDesc_1_1& bufDesc) {
+Return<void> FrameHandler::deliverFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers) {
+ mLock.lock();
+ // For VTS tests, FrameHandler uses a single frame among delivered frames.
+ auto bufferIdx = mFramesDisplayed % buffers.size();
+ auto buffer = buffers[bufferIdx];
+ mLock.unlock();
+
const AHardwareBuffer_Desc* pDesc =
- reinterpret_cast<const AHardwareBuffer_Desc *>(&bufDesc.buffer.description);
+ reinterpret_cast<const AHardwareBuffer_Desc *>(&buffer.buffer.description);
ALOGD("Received a frame from the camera (%p)",
- bufDesc.buffer.nativeHandle.getNativeHandle());
+ buffer.buffer.nativeHandle.getNativeHandle());
// Store a dimension of a received frame.
mFrameWidth = pDesc->width;
@@ -150,6 +156,7 @@
// If we were given an opened display at construction time, then send the received
// image back down the camera.
+ bool displayed = false;
if (mDisplay.get()) {
// Get the output buffer we'll use to display the imagery
BufferDesc_1_0 tgtBuffer = {};
@@ -163,7 +170,7 @@
ALOGE("Didn't get requested output buffer -- skipping this frame.");
} else {
// Copy the contents of the of buffer.memHandle into tgtBuffer
- copyBufferContents(tgtBuffer, bufDesc);
+ copyBufferContents(tgtBuffer, buffer);
// Send the target buffer back for display
Return<EvsResult> result = mDisplay->returnTargetBufferForDisplay(tgtBuffer);
@@ -179,40 +186,42 @@
} else {
// Everything looks good!
// Keep track so tests or watch dogs can monitor progress
- mLock.lock();
- mFramesDisplayed++;
- mLock.unlock();
+ displayed = true;
}
}
}
+ mLock.lock();
+ // increases counters
+ ++mFramesReceived;
+ mFramesDisplayed += (int)displayed;
+ mLock.unlock();
+ mFrameSignal.notify_all();
switch (mReturnMode) {
case eAutoReturn:
// Send the camera buffer back now that the client has seen it
ALOGD("Calling doneWithFrame");
- mCamera->doneWithFrame_1_1(bufDesc);
+ mCamera->doneWithFrame_1_1(buffers);
break;
case eNoAutoReturn:
- // Hang onto the buffer handle for now -- the client will return it explicitly later
- mHeldBuffers.push(bufDesc);
+ // Hang onto the buffer handles for now -- the client will return it explicitly later
+ mHeldBuffers.push(buffers);
+ break;
}
- mLock.lock();
- ++mFramesReceived;
- mLock.unlock();
- mFrameSignal.notify_all();
-
ALOGD("Frame handling complete");
return Void();
}
-Return<void> FrameHandler::notify(const EvsEvent& event) {
+Return<void> FrameHandler::notify(const EvsEventDesc& event) {
// Local flag we use to keep track of when the stream is stopping
- mLock.lock();
- mLatestEventDesc = event;
+ std::unique_lock<std::mutex> lock(mEventLock);
+ mLatestEventDesc.aType = event.aType;
+ mLatestEventDesc.payload[0] = event.payload[0];
+ mLatestEventDesc.payload[1] = event.payload[1];
if (mLatestEventDesc.aType == EvsEventType::STREAM_STOPPED) {
// Signal that the last frame has been received and the stream is stopped
mRunning = false;
@@ -222,8 +231,8 @@
} else {
ALOGD("Received an event %s", eventToString(mLatestEventDesc.aType));
}
- mLock.unlock();
- mEventSignal.notify_all();
+ lock.unlock();
+ mEventSignal.notify_one();
return Void();
}
@@ -342,25 +351,34 @@
}
}
-bool FrameHandler::waitForEvent(const EvsEventType aTargetEvent,
- EvsEvent &event) {
+bool FrameHandler::waitForEvent(const EvsEventDesc& aTargetEvent,
+ EvsEventDesc& aReceivedEvent,
+ bool ignorePayload) {
// Wait until we get an expected parameter change event.
- std::unique_lock<std::mutex> lock(mLock);
+ std::unique_lock<std::mutex> lock(mEventLock);
auto now = std::chrono::system_clock::now();
- bool result = mEventSignal.wait_until(lock, now + 5s,
- [this, aTargetEvent, &event](){
- bool flag = mLatestEventDesc.aType == aTargetEvent;
- if (flag) {
- event.aType = mLatestEventDesc.aType;
- event.payload[0] = mLatestEventDesc.payload[0];
- event.payload[1] = mLatestEventDesc.payload[1];
+ bool found = false;
+ while (!found) {
+ bool result = mEventSignal.wait_until(lock, now + 5s,
+ [this, aTargetEvent, ignorePayload, &aReceivedEvent, &found](){
+ found = (mLatestEventDesc.aType == aTargetEvent.aType) &&
+ (ignorePayload || (mLatestEventDesc.payload[0] == aTargetEvent.payload[0] &&
+ mLatestEventDesc.payload[1] == aTargetEvent.payload[1]));
+
+ aReceivedEvent.aType = mLatestEventDesc.aType;
+ aReceivedEvent.payload[0] = mLatestEventDesc.payload[0];
+ aReceivedEvent.payload[1] = mLatestEventDesc.payload[1];
+ return found;
}
+ );
- return flag;
+ if (!result) {
+ ALOGW("A timer is expired before a target event has happened.");
+ break;
}
- );
+ }
- return !result;
+ return found;
}
const char *FrameHandler::eventToString(const EvsEventType aType) {
diff --git a/automotive/evs/1.1/vts/functional/FrameHandler.h b/automotive/evs/1.1/vts/functional/FrameHandler.h
index e5f1b8f..21e85fe 100644
--- a/automotive/evs/1.1/vts/functional/FrameHandler.h
+++ b/automotive/evs/1.1/vts/functional/FrameHandler.h
@@ -73,8 +73,9 @@
bool isRunning();
void waitForFrameCount(unsigned frameCount);
- bool waitForEvent(const EvsEventType aTargetEvent,
- EvsEvent &eventDesc);
+ bool waitForEvent(const EvsEventDesc& aTargetEvent,
+ EvsEventDesc& aReceivedEvent,
+ bool ignorePayload = false);
void getFramesCounters(unsigned* received, unsigned* displayed);
void getFrameDimension(unsigned* width, unsigned* height);
@@ -83,8 +84,8 @@
Return<void> deliverFrame(const BufferDesc_1_0& buffer) override;
// Implementation for ::android::hardware::automotive::evs::V1_1::IEvsCameraStream
- Return<void> deliverFrame_1_1(const BufferDesc_1_1& buffer) override;
- Return<void> notify(const EvsEvent& event) override;
+ Return<void> deliverFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffer) override;
+ Return<void> notify(const EvsEventDesc& event) override;
// Local implementation details
bool copyBufferContents(const BufferDesc_1_0& tgtBuffer, const BufferDesc_1_1& srcBuffer);
@@ -99,17 +100,18 @@
// Since we get frames delivered to us asynchronously via the IEvsCameraStream interface,
// we need to protect all member variables that may be modified while we're streaming
// (ie: those below)
- std::mutex mLock;
- std::condition_variable mEventSignal;
- std::condition_variable mFrameSignal;
+ std::mutex mLock;
+ std::mutex mEventLock;
+ std::condition_variable mEventSignal;
+ std::condition_variable mFrameSignal;
+ std::queue<hidl_vec<BufferDesc_1_1>> mHeldBuffers;
- std::queue<BufferDesc_1_1> mHeldBuffers;
bool mRunning = false;
unsigned mFramesReceived = 0; // Simple counter -- rolls over eventually!
unsigned mFramesDisplayed = 0; // Simple counter -- rolls over eventually!
unsigned mFrameWidth = 0;
unsigned mFrameHeight = 0;
- EvsEvent mLatestEventDesc;
+ EvsEventDesc mLatestEventDesc;
};
diff --git a/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp b/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
index 1d3fd87..4fc4e4c 100644
--- a/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
+++ b/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
@@ -41,6 +41,8 @@
#include <cstdio>
#include <cstring>
#include <cstdlib>
+#include <thread>
+#include <unordered_set>
#include <hidl/HidlTransportSupport.h>
#include <hwbinder/ProcessState.h>
@@ -67,6 +69,7 @@
using ::android::hardware::hidl_handle;
using ::android::hardware::hidl_string;
using ::android::sp;
+using ::android::wp;
using ::android::hardware::camera::device::V3_2::Stream;
using ::android::hardware::automotive::evs::V1_0::DisplayDesc;
using ::android::hardware::automotive::evs::V1_0::DisplayState;
@@ -117,7 +120,15 @@
mIsHwModule = !service_name.compare(kEnumeratorName);
}
- virtual void TearDown() override {}
+ virtual void TearDown() override {
+ // Attempt to close any active camera
+ for (auto &&c : activeCameras) {
+ sp<IEvsCamera_1_1> cam = c.promote();
+ if (cam != nullptr) {
+ pEnumerator->closeCamera(cam);
+ }
+ }
+ }
protected:
void loadCameraList() {
@@ -141,10 +152,90 @@
ASSERT_GE(cameraInfo.size(), 1u);
}
- sp<IEvsEnumerator> pEnumerator; // Every test needs access to the service
- std::vector <CameraDesc> cameraInfo; // Empty unless/until loadCameraList() is called
- bool mIsHwModule; // boolean to tell current module under testing
- // is HW module implementation.
+ bool isLogicalCamera(const camera_metadata_t *metadata) {
+ if (metadata == nullptr) {
+ // A logical camera device must have a valid camera metadata.
+ return false;
+ }
+
+ // Looking for LOGICAL_MULTI_CAMERA capability from metadata.
+ camera_metadata_ro_entry_t entry;
+ int rc = find_camera_metadata_ro_entry(metadata,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
+ &entry);
+ if (0 != rc) {
+ // No capabilities are found.
+ return false;
+ }
+
+ for (size_t i = 0; i < entry.count; ++i) {
+ uint8_t cap = entry.data.u8[i];
+ if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id,
+ bool& flag) {
+ std::unordered_set<std::string> physicalCameras;
+
+ auto it = cameraInfo.begin();
+ while (it != cameraInfo.end()) {
+ if (it->v1.cameraId == id) {
+ break;
+ }
+ ++it;
+ }
+
+ if (it == cameraInfo.end()) {
+ // Unknown camera is requested. Return an empty list.
+ return physicalCameras;
+ }
+
+ const camera_metadata_t *metadata =
+ reinterpret_cast<camera_metadata_t *>(&it->metadata[0]);
+ flag = isLogicalCamera(metadata);
+ if (!flag) {
+ // EVS assumes that the device w/o a valid metadata is a physical
+ // device.
+ ALOGI("%s is not a logical camera device.", id.c_str());
+ physicalCameras.emplace(id);
+ return physicalCameras;
+ }
+
+ // Look for physical camera identifiers
+ camera_metadata_ro_entry entry;
+ int rc = find_camera_metadata_ro_entry(metadata,
+ ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
+ &entry);
+ ALOGE_IF(rc, "No physical camera ID is found for a logical camera device");
+
+ const uint8_t *ids = entry.data.u8;
+ size_t start = 0;
+ for (size_t i = 0; i < entry.count; ++i) {
+ if (ids[i] == '\0') {
+ if (start != i) {
+ std::string id(reinterpret_cast<const char *>(ids + start));
+ physicalCameras.emplace(id);
+ }
+ start = i + 1;
+ }
+ }
+
+ ALOGI("%s consists of %d physical camera devices.", id.c_str(), (int)physicalCameras.size());
+ return physicalCameras;
+ }
+
+
+ sp<IEvsEnumerator> pEnumerator; // Every test needs access to the service
+ std::vector<CameraDesc> cameraInfo; // Empty unless/until loadCameraList() is called
+ bool mIsHwModule; // boolean to tell current module under testing
+ // is HW module implementation.
+    std::deque<wp<IEvsCamera_1_1>> activeCameras; // A list of active camera handles that
+                                                  // need to be cleaned up.
};
@@ -168,12 +259,32 @@
// Open and close each camera twice
for (auto&& cam: cameraInfo) {
+ bool isLogicalCam = false;
+ auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+ if (mIsHwModule && isLogicalCam) {
+ ALOGI("Skip a logical device %s for HW module", cam.v1.cameraId.c_str());
+ continue;
+ }
+
for (int pass = 0; pass < 2; pass++) {
+ activeCameras.clear();
sp<IEvsCamera_1_1> pCam =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCam, nullptr);
+ for (auto&& devName : devices) {
+ bool matched = false;
+ pCam->getPhysicalCameraInfo(devName,
+ [&devName, &matched](const CameraDesc& info) {
+ matched = devName == info.v1.cameraId;
+ });
+ ASSERT_TRUE(matched);
+ }
+
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam);
+
// Verify that this camera self-identifies correctly
pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
ALOGD("Found camera %s", desc.v1.cameraId.c_str());
@@ -206,11 +317,22 @@
// Open and close each camera twice
for (auto&& cam: cameraInfo) {
+ bool isLogicalCam = false;
+ getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+ if (mIsHwModule && isLogicalCam) {
+ ALOGI("Skip a logical device %s for HW module", cam.v1.cameraId.c_str());
+ continue;
+ }
+
+ activeCameras.clear();
sp<IEvsCamera_1_1> pCam =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCam, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam);
+
// Verify that this camera self-identifies correctly
pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
ALOGD("Found camera %s", desc.v1.cameraId.c_str());
@@ -221,9 +343,13 @@
sp<IEvsCamera_1_1> pCam2 =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
- ASSERT_NE(pCam, pCam2);
ASSERT_NE(pCam2, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam2);
+
+ ASSERT_NE(pCam, pCam2);
+
Return<EvsResult> result = pCam->setMaxFramesInFlight(2);
if (mIsHwModule) {
// Verify that the old camera rejects calls via HW module.
@@ -268,11 +394,22 @@
// Test each reported camera
for (auto&& cam: cameraInfo) {
+ bool isLogicalCam = false;
+ auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+ if (mIsHwModule && isLogicalCam) {
+ ALOGI("Skip a logical device %s", cam.v1.cameraId.c_str());
+ continue;
+ }
+
+ activeCameras.clear();
sp<IEvsCamera_1_1> pCam =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCam, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam);
+
// Set up a frame receiver object which will fire up its own thread
sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
nullptr,
@@ -280,6 +417,7 @@
// Start the camera's video stream
nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
+
bool startResult = frameHandler->startStream();
ASSERT_TRUE(startResult);
@@ -287,9 +425,17 @@
frameHandler->waitForFrameCount(1);
nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;
- EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame), kMaxStreamStartMilliseconds);
- printf("Measured time to first frame %0.2f ms\n", timeToFirstFrame * kNanoToMilliseconds);
- ALOGI("Measured time to first frame %0.2f ms", timeToFirstFrame * kNanoToMilliseconds);
+
+ // Extra delays are expected when we attempt to start a video stream on
+ // a logical camera device. The delay is expected to be at most the
+ // number of physical camera devices multiplied by
+ // kMaxStreamStartMilliseconds.
+ EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
+ kMaxStreamStartMilliseconds * devices.size());
+ printf("%s: Measured time to first frame %0.2f ms\n",
+ cam.v1.cameraId.c_str(), timeToFirstFrame * kNanoToMilliseconds);
+ ALOGI("%s: Measured time to first frame %0.2f ms",
+ cam.v1.cameraId.c_str(), timeToFirstFrame * kNanoToMilliseconds);
// Check aspect ratio
unsigned width = 0, height = 0;
@@ -299,6 +445,13 @@
// Wait a bit, then ensure we get at least the required minimum number of frames
sleep(5);
nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
+
+ // Even when the camera pointer goes out of scope, the FrameHandler object will
+ // keep the stream alive unless we tell it to shutdown.
+ // Also note that the FrameHandle and the Camera have a mutual circular reference, so
+ // we have to break that cycle in order for either of them to get cleaned up.
+ frameHandler->shutdown();
+
unsigned framesReceived = 0;
frameHandler->getFramesCounters(&framesReceived, nullptr);
framesReceived = framesReceived - 1; // Back out the first frame we already waited for
@@ -308,12 +461,6 @@
ALOGI("Measured camera rate %3.2f fps", framesPerSecond);
EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);
- // Even when the camera pointer goes out of scope, the FrameHandler object will
- // keep the stream alive unless we tell it to shutdown.
- // Also note that the FrameHandle and the Camera have a mutual circular reference, so
- // we have to break that cycle in order for either of them to get cleaned up.
- frameHandler->shutdown();
-
// Explicitly release the camera
pEnumerator->closeCamera(pCam);
}
@@ -340,12 +487,22 @@
// Test each reported camera
for (auto&& cam: cameraInfo) {
+ bool isLogicalCam = false;
+ getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+ if (mIsHwModule && isLogicalCam) {
+ ALOGI("Skip a logical device %s for HW module", cam.v1.cameraId.c_str());
+ continue;
+ }
+ activeCameras.clear();
sp<IEvsCamera_1_1> pCam =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCam, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam);
+
// Ask for a crazy number of buffers in flight to ensure it errors correctly
Return<EvsResult> badResult = pCam->setMaxFramesInFlight(0xFFFFFFFF);
EXPECT_EQ(EvsResult::BUFFER_NOT_AVAILABLE, badResult);
@@ -366,7 +523,7 @@
// Check that the video stream stalls once we've gotten exactly the number of buffers
// we requested since we told the frameHandler not to return them.
- sleep(2); // 1 second should be enough for at least 5 frames to be delivered worst case
+ sleep(1); // 1 second should be enough for at least 5 frames to be delivered worst case
unsigned framesReceived = 0;
frameHandler->getFramesCounters(&framesReceived, nullptr);
ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
@@ -416,11 +573,22 @@
// Test each reported camera
for (auto&& cam: cameraInfo) {
+ bool isLogicalCam = false;
+ getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+ if (mIsHwModule && isLogicalCam) {
+ ALOGI("Skip a logical device %s for HW module", cam.v1.cameraId.c_str());
+ continue;
+ }
+
+ activeCameras.clear();
sp<IEvsCamera_1_1> pCam =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCam, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam);
+
// Set up a frame receiver object which will fire up its own thread.
sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
pDisplay,
@@ -484,17 +652,24 @@
// Test each reported camera
for (auto&& cam: cameraInfo) {
+ activeCameras.clear();
// Create two camera clients.
sp<IEvsCamera_1_1> pCam0 =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCam0, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam0);
+
sp<IEvsCamera_1_1> pCam1 =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCam1, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam1);
+
// Set up per-client frame receiver objects which will fire up its own thread
sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
nullptr,
@@ -554,6 +729,11 @@
// Explicitly release the camera
pEnumerator->closeCamera(pCam0);
pEnumerator->closeCamera(pCam1);
+
+ // TODO(b/145459970, b/145457727): the sleep() below is added to ensure the
+ // destruction of active camera objects; this may be related to these two
+ // issues.
+ sleep(1);
}
}
@@ -575,12 +755,25 @@
// Test each reported camera
Return<EvsResult> result = EvsResult::OK;
for (auto&& cam: cameraInfo) {
+ bool isLogicalCam = false;
+ getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+ if (isLogicalCam) {
+ // TODO(b/145465724): Support camera parameter programming on
+ // logical devices.
+ ALOGI("Skip a logical device %s", cam.v1.cameraId.c_str());
+ continue;
+ }
+
+ activeCameras.clear();
// Create a camera client
sp<IEvsCamera_1_1> pCam =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCam, nullptr);
+ // Store a camera
+ activeCameras.push_back(pCam);
+
// Get the parameter list
std::vector<CameraParam> cmds;
pCam->getParameterList([&cmds](hidl_vec<CameraParam> cmdList) {
@@ -626,48 +819,54 @@
EvsResult result = EvsResult::OK;
if (cmd == CameraParam::ABSOLUTE_FOCUS) {
// Try to turn off auto-focus
- int32_t val1 = 0;
- pCam->getIntParameter(CameraParam::AUTO_FOCUS,
- [&result, &val1](auto status, auto value) {
+ std::vector<int32_t> values;
+ pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+ [&result, &values](auto status, auto effectiveValues) {
result = status;
if (status == EvsResult::OK) {
- val1 = value;
+ for (auto &&v : effectiveValues) {
+ values.push_back(v);
+ }
}
});
- if (val1 != 0) {
- pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
- [&result, &val1](auto status, auto effectiveValue) {
- result = status;
- val1 = effectiveValue;
- });
- ASSERT_EQ(EvsResult::OK, result);
- ASSERT_EQ(val1, 0);
+ ASSERT_EQ(EvsResult::OK, result);
+ for (auto &&v : values) {
+ ASSERT_EQ(v, 0);
}
}
// Try to program a parameter with a random value [minVal, maxVal]
int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
- int32_t val1 = 0;
+ std::vector<int32_t> values;
// Rounding down
val0 = val0 - (val0 % step);
pCam->setIntParameter(cmd, val0,
- [&result, &val1](auto status, auto effectiveValue) {
+ [&result, &values](auto status, auto effectiveValues) {
result = status;
- val1 = effectiveValue;
+ if (status == EvsResult::OK) {
+ for (auto &&v : effectiveValues) {
+ values.push_back(v);
+ }
+ }
});
ASSERT_EQ(EvsResult::OK, result);
+ values.clear();
pCam->getIntParameter(cmd,
- [&result, &val1](auto status, auto value) {
+ [&result, &values](auto status, auto readValues) {
result = status;
if (status == EvsResult::OK) {
- val1 = value;
+ for (auto &&v : readValues) {
+ values.push_back(v);
+ }
}
});
ASSERT_EQ(EvsResult::OK, result);
- ASSERT_EQ(val0, val1) << "Values are not matched.";
+ for (auto &&v : values) {
+ ASSERT_EQ(val0, v) << "Values are not matched.";
+ }
}
result = pCam->unsetMaster();
@@ -704,16 +903,33 @@
// Test each reported camera
for (auto&& cam: cameraInfo) {
+ bool isLogicalCam = false;
+ getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+ if (isLogicalCam) {
+ // TODO(b/145465724): Support camera parameter programming on
+ // logical devices.
+ ALOGI("Skip a logical device %s", cam.v1.cameraId.c_str());
+ continue;
+ }
+
+ activeCameras.clear();
// Create two camera clients.
sp<IEvsCamera_1_1> pCamMaster =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCamMaster, nullptr);
+
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCamMaster);
+
sp<IEvsCamera_1_1> pCamNonMaster =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCamNonMaster, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCamNonMaster);
+
// Set up per-client frame receiver objects which will fire up its own thread
sp<FrameHandler> frameHandlerMaster =
new FrameHandler(pCamMaster, cam,
@@ -750,13 +966,45 @@
// Non-master client expects to receive a master role released
// notification.
- EvsEvent aNotification = {};
+ EvsEventDesc aTargetEvent = {};
+ EvsEventDesc aNotification = {};
+
+ bool listening = false;
+ std::mutex eventLock;
+ std::condition_variable eventCond;
+ std::thread listener = std::thread(
+ [&aNotification, &frameHandlerNonMaster, &listening, &eventCond]() {
+ // Notify that a listening thread is running.
+ listening = true;
+ eventCond.notify_all();
+
+ EvsEventDesc aTargetEvent;
+ aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
+ if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification, true)) {
+ ALOGW("A timer is expired before a target event is fired.");
+ }
+
+ }
+ );
+
+ // Wait until a listening thread starts.
+ std::unique_lock<std::mutex> lock(eventLock);
+ auto timer = std::chrono::system_clock::now();
+ while (!listening) {
+ timer += 1s;
+ eventCond.wait_until(lock, timer);
+ }
+ lock.unlock();
// Release a master role.
pCamMaster->unsetMaster();
- // Verify a change notification.
- frameHandlerNonMaster->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+ // Join a listening thread.
+ if (listener.joinable()) {
+ listener.join();
+ }
+
+ // Verify change notifications.
ASSERT_EQ(EvsEventType::MASTER_RELEASED,
static_cast<EvsEventType>(aNotification.aType));
@@ -768,23 +1016,49 @@
result = pCamMaster->setMaster();
ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
+ listening = false;
+ listener = std::thread(
+ [&aNotification, &frameHandlerMaster, &listening, &eventCond]() {
+ // Notify that a listening thread is running.
+ listening = true;
+ eventCond.notify_all();
+
+ EvsEventDesc aTargetEvent;
+ aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
+ if (!frameHandlerMaster->waitForEvent(aTargetEvent, aNotification, true)) {
+ ALOGW("A timer is expired before a target event is fired.");
+ }
+
+ }
+ );
+
+ // Wait until a listening thread starts.
+ timer = std::chrono::system_clock::now();
+ lock.lock();
+ while (!listening) {
+ eventCond.wait_until(lock, timer + 1s);
+ }
+ lock.unlock();
+
// Closing current master client.
frameHandlerNonMaster->shutdown();
- // Verify a change notification.
- frameHandlerMaster->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+ // Join a listening thread.
+ if (listener.joinable()) {
+ listener.join();
+ }
+
+ // Verify change notifications.
ASSERT_EQ(EvsEventType::MASTER_RELEASED,
static_cast<EvsEventType>(aNotification.aType));
- // Closing another stream.
+ // Closing streams.
frameHandlerMaster->shutdown();
// Explicitly release the camera
pEnumerator->closeCamera(pCamMaster);
pEnumerator->closeCamera(pCamNonMaster);
}
-
-
}
@@ -810,16 +1084,33 @@
// Test each reported camera
for (auto&& cam: cameraInfo) {
+ bool isLogicalCam = false;
+ getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+ if (isLogicalCam) {
+ // TODO(b/145465724): Support camera parameter programming on
+ // logical devices.
+ ALOGI("Skip a logical device %s", cam.v1.cameraId.c_str());
+ continue;
+ }
+
+ activeCameras.clear();
// Create two camera clients.
sp<IEvsCamera_1_1> pCamMaster =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCamMaster, nullptr);
+
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCamMaster);
+
sp<IEvsCamera_1_1> pCamNonMaster =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCamNonMaster, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCamNonMaster);
+
// Get the parameter list
std::vector<CameraParam> camMasterCmds, camNonMasterCmds;
pCamMaster->getParameterList([&camMasterCmds](hidl_vec<CameraParam> cmdList) {
@@ -879,7 +1170,9 @@
frameHandlerNonMaster->waitForFrameCount(1);
int32_t val0 = 0;
- int32_t val1 = 0;
+ std::vector<int32_t> values;
+ EvsEventDesc aNotification0 = {};
+ EvsEventDesc aNotification1 = {};
for (auto &cmd : camMasterCmds) {
// Get a valid parameter value range
int32_t minVal, maxVal, step;
@@ -895,60 +1188,143 @@
EvsResult result = EvsResult::OK;
if (cmd == CameraParam::ABSOLUTE_FOCUS) {
// Try to turn off auto-focus
- int32_t val1 = 1;
+ values.clear();
pCamMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
- [&result, &val1](auto status, auto effectiveValue) {
+ [&result, &values](auto status, auto effectiveValues) {
result = status;
- val1 = effectiveValue;
+ if (status == EvsResult::OK) {
+ for (auto &&v : effectiveValues) {
+ values.push_back(v);
+ }
+ }
});
ASSERT_EQ(EvsResult::OK, result);
- ASSERT_EQ(val1, 0);
+ for (auto &&v : values) {
+ ASSERT_EQ(v, 0);
+ }
}
- // Try to program a parameter
+ // Calculate a parameter value to program.
val0 = minVal + (std::rand() % (maxVal - minVal));
-
- // Rounding down
val0 = val0 - (val0 % step);
+
+ // Prepare and start event listeners.
+ bool listening0 = false;
+ bool listening1 = false;
+ std::condition_variable eventCond;
+ std::thread listener0 = std::thread(
+ [cmd, val0,
+ &aNotification0, &frameHandlerMaster, &listening0, &listening1, &eventCond]() {
+ listening0 = true;
+ if (listening1) {
+ eventCond.notify_all();
+ }
+
+ EvsEventDesc aTargetEvent;
+ aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+ aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
+ aTargetEvent.payload[1] = val0;
+ if (!frameHandlerMaster->waitForEvent(aTargetEvent, aNotification0)) {
+ ALOGW("A timer is expired before a target event is fired.");
+ }
+ }
+ );
+ std::thread listener1 = std::thread(
+ [cmd, val0,
+ &aNotification1, &frameHandlerNonMaster, &listening0, &listening1, &eventCond]() {
+ listening1 = true;
+ if (listening0) {
+ eventCond.notify_all();
+ }
+
+ EvsEventDesc aTargetEvent;
+ aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+ aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
+ aTargetEvent.payload[1] = val0;
+ if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification1)) {
+ ALOGW("A timer is expired before a target event is fired.");
+ }
+ }
+ );
+
+ // Wait until a listening thread starts.
+ std::mutex eventLock;
+ std::unique_lock<std::mutex> lock(eventLock);
+ auto timer = std::chrono::system_clock::now();
+ while (!listening0 || !listening1) {
+ eventCond.wait_until(lock, timer + 1s);
+ }
+ lock.unlock();
+
+ // Try to program a parameter
+ values.clear();
pCamMaster->setIntParameter(cmd, val0,
- [&result, &val1](auto status, auto effectiveValue) {
- result = status;
- val1 = effectiveValue;
- });
- ASSERT_EQ(EvsResult::OK, result);
-
- // Wait a moment
- sleep(1);
-
- // Non-master client expects to receive a parameter change notification
- // whenever a master client adjusts it.
- EvsEvent aNotification = {};
-
- pCamMaster->getIntParameter(cmd,
- [&result, &val1](auto status, auto value) {
+ [&result, &values](auto status, auto effectiveValues) {
result = status;
if (status == EvsResult::OK) {
- val1 = value;
+ for (auto &&v : effectiveValues) {
+ values.push_back(v);
+ }
+ }
+ });
+
+ ASSERT_EQ(EvsResult::OK, result);
+ for (auto &&v : values) {
+ ASSERT_EQ(val0, v) << "Values are not matched.";
+ }
+
+ // Join a listening thread.
+ if (listener0.joinable()) {
+ listener0.join();
+ }
+ if (listener1.joinable()) {
+ listener1.join();
+ }
+
+ // Verify a change notification
+ ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
+ static_cast<EvsEventType>(aNotification0.aType));
+ ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
+ static_cast<EvsEventType>(aNotification1.aType));
+ ASSERT_EQ(cmd,
+ static_cast<CameraParam>(aNotification0.payload[0]));
+ ASSERT_EQ(cmd,
+ static_cast<CameraParam>(aNotification1.payload[0]));
+ for (auto &&v : values) {
+ ASSERT_EQ(v,
+ static_cast<int32_t>(aNotification0.payload[1]));
+ ASSERT_EQ(v,
+ static_cast<int32_t>(aNotification1.payload[1]));
+ }
+
+ // Clients expect to receive a parameter change notification
+ // whenever a master client adjusts it.
+ values.clear();
+ pCamMaster->getIntParameter(cmd,
+ [&result, &values](auto status, auto readValues) {
+ result = status;
+ if (status == EvsResult::OK) {
+ for (auto &&v : readValues) {
+ values.push_back(v);
+ }
}
});
ASSERT_EQ(EvsResult::OK, result);
- ASSERT_EQ(val0, val1) << "Values are not matched.";
-
- // Verify a change notification
- frameHandlerNonMaster->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
- ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
- static_cast<EvsEventType>(aNotification.aType));
- ASSERT_EQ(cmd,
- static_cast<CameraParam>(aNotification.payload[0]));
- ASSERT_EQ(val1,
- static_cast<int32_t>(aNotification.payload[1]));
+ for (auto &&v : values) {
+ ASSERT_EQ(val0, v) << "Values are not matched.";
+ }
}
// Try to adjust a parameter via non-master client
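+ // Only the master client may program a parameter, so this request is
+ // expected to fail.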
+ values.clear();
pCamNonMaster->setIntParameter(camNonMasterCmds[0], val0,
- [&result, &val1](auto status, auto effectiveValue) {
+ [&result, &values](auto status, auto effectiveValues) {
result = status;
- val1 = effectiveValue;
+ if (status == EvsResult::OK) {
+ for (auto &&v : effectiveValues) {
+ values.push_back(v);
+ }
+ }
});
ASSERT_EQ(EvsResult::INVALID_ARG, result);
@@ -957,14 +1333,48 @@
ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
// Master client retires from a master role
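+ // Start a listener on the non-master client to catch the MASTER_RELEASED
+ // event that the following unsetMaster() call is expected to generate.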
+ bool listening = false;
+ std::condition_variable eventCond;
+ std::thread listener = std::thread(
+ [&aNotification0, &frameHandlerNonMaster, &listening, &eventCond]() {
+ listening = true;
+ eventCond.notify_all();
+
+ EvsEventDesc aTargetEvent;
+ aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
+ if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification0, true)) {
+ ALOGW("A timer is expired before a target event is fired.");
+ }
+ }
+ );
+
+ std::mutex eventLock;
+ auto timer = std::chrono::system_clock::now();
+ std::unique_lock<std::mutex> lock(eventLock);
+ while (!listening) {
+ eventCond.wait_until(lock, timer + 1s);
+ }
+ lock.unlock();
+
result = pCamMaster->unsetMaster();
ASSERT_EQ(EvsResult::OK, result);
+ if (listener.joinable()) {
+ listener.join();
+ }
+ ASSERT_EQ(EvsEventType::MASTER_RELEASED,
+ static_cast<EvsEventType>(aNotification0.aType));
+
// Try to adjust a parameter after being retired
+ values.clear();
pCamMaster->setIntParameter(camMasterCmds[0], val0,
- [&result, &val1](auto status, auto effectiveValue) {
+ [&result, &values](auto status, auto effectiveValues) {
result = status;
- val1 = effectiveValue;
+ if (status == EvsResult::OK) {
+ for (auto &&v : effectiveValues) {
+ values.push_back(v);
+ }
+ }
});
ASSERT_EQ(EvsResult::INVALID_ARG, result);
@@ -986,55 +1396,128 @@
);
EvsResult result = EvsResult::OK;
+ values.clear();
if (cmd == CameraParam::ABSOLUTE_FOCUS) {
// Try to turn off auto-focus
- int32_t val1 = 1;
+ values.clear();
pCamNonMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
- [&result, &val1](auto status, auto effectiveValue) {
+ [&result, &values](auto status, auto effectiveValues) {
result = status;
- val1 = effectiveValue;
+ if (status == EvsResult::OK) {
+ for (auto &&v : effectiveValues) {
+ values.push_back(v);
+ }
+ }
});
ASSERT_EQ(EvsResult::OK, result);
- ASSERT_EQ(val1, 0);
+ for (auto &&v : values) {
+ ASSERT_EQ(v, 0);
+ }
}
- // Try to program a parameter
+ // Calculate a parameter value to program and round it down to the nearest step.
val0 = minVal + (std::rand() % (maxVal - minVal));
-
- // Rounding down
val0 = val0 - (val0 % step);
+
+ // Prepare and start event listeners.
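+ // As above, both clients listen for the PARAMETER_CHANGED event that the
+ // upcoming setIntParameter() call should generate.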
+ bool listening0 = false;
+ bool listening1 = false;
+ std::condition_variable eventCond;
+ std::thread listener0 = std::thread(
+ [&cmd, &val0, &aNotification0, &frameHandlerMaster, &listening0, &listening1, &eventCond]() {
+ listening0 = true;
+ if (listening1) {
+ eventCond.notify_all();
+ }
+
+ EvsEventDesc aTargetEvent;
+ aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+ aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
+ aTargetEvent.payload[1] = val0;
+ if (!frameHandlerMaster->waitForEvent(aTargetEvent, aNotification0)) {
+ ALOGW("A timer is expired before a target event is fired.");
+ }
+ }
+ );
+ std::thread listener1 = std::thread(
+ [&cmd, &val0, &aNotification1, &frameHandlerNonMaster, &listening0, &listening1, &eventCond]() {
+ listening1 = true;
+ if (listening0) {
+ eventCond.notify_all();
+ }
+
+ EvsEventDesc aTargetEvent;
+ aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+ aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
+ aTargetEvent.payload[1] = val0;
+ if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification1)) {
+ ALOGW("A timer is expired before a target event is fired.");
+ }
+ }
+ );
+
+ // Wait until both listening threads start.
+ std::mutex eventLock;
+ std::unique_lock<std::mutex> lock(eventLock);
+ auto timer = std::chrono::system_clock::now();
+ while (!listening0 || !listening1) {
+ eventCond.wait_until(lock, timer + 1s);
+ }
+ lock.unlock();
+
+ // Try to program a parameter
+ values.clear();
pCamNonMaster->setIntParameter(cmd, val0,
- [&result, &val1](auto status, auto effectiveValue) {
- result = status;
- val1 = effectiveValue;
- });
- ASSERT_EQ(EvsResult::OK, result);
-
- // Wait a moment
- sleep(1);
-
- // Non-master client expects to receive a parameter change notification
- // whenever a master client adjusts it.
- EvsEvent aNotification = {};
-
- pCamNonMaster->getIntParameter(cmd,
- [&result, &val1](auto status, auto value) {
+ [&result, &values](auto status, auto effectiveValues) {
result = status;
if (status == EvsResult::OK) {
- val1 = value;
+ for (auto &&v : effectiveValues) {
+ values.push_back(v);
+ }
}
});
ASSERT_EQ(EvsResult::OK, result);
- ASSERT_EQ(val0, val1) << "Values are not matched.";
+
+ // Clients expect to receive a parameter change notification
+ // whenever the master client adjusts it.
+ values.clear();
+ pCamNonMaster->getIntParameter(cmd,
+ [&result, &values](auto status, auto readValues) {
+ result = status;
+ if (status == EvsResult::OK) {
+ for (auto &&v : readValues) {
+ values.push_back(v);
+ }
+ }
+ });
+ ASSERT_EQ(EvsResult::OK, result);
+ for (auto &&v : values) {
+ ASSERT_EQ(val0, v) << "Values are not matched.";
+ }
+
+ // Join the listening threads.
+ if (listener0.joinable()) {
+ listener0.join();
+ }
+ if (listener1.joinable()) {
+ listener1.join();
+ }
// Verify a change notification
- frameHandlerMaster->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
- static_cast<EvsEventType>(aNotification.aType));
+ static_cast<EvsEventType>(aNotification0.aType));
+ ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
+ static_cast<EvsEventType>(aNotification1.aType));
ASSERT_EQ(cmd,
- static_cast<CameraParam>(aNotification.payload[0]));
- ASSERT_EQ(val1,
- static_cast<int32_t>(aNotification.payload[1]));
+ static_cast<CameraParam>(aNotification0.payload[0]));
+ ASSERT_EQ(cmd,
+ static_cast<CameraParam>(aNotification1.payload[0]));
+ for (auto &&v : values) {
+ ASSERT_EQ(v,
+ static_cast<int32_t>(aNotification0.payload[1]));
+ ASSERT_EQ(v,
+ static_cast<int32_t>(aNotification1.payload[1]));
+ }
}
// New master retires from a master role
@@ -1078,17 +1561,25 @@
// Test each reported camera
for (auto&& cam: cameraInfo) {
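+ // Reset the list of cameras tracked for clean-up before opening new
+ // clients for this camera.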
+ activeCameras.clear();
+
// Create two clients
sp<IEvsCamera_1_1> pCam0 =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCam0, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam0);
+
sp<IEvsCamera_1_1> pCam1 =
IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
.withDefault(nullptr);
ASSERT_NE(pCam1, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam1);
+
// Get the parameter list; this test will use the first command in both
// lists.
std::vector<CameraParam> cam0Cmds, cam1Cmds;
@@ -1144,108 +1635,260 @@
}
);
- if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
- // Try to turn off auto-focus
- int32_t val1 = 0;
- pCam1->getIntParameter(CameraParam::AUTO_FOCUS,
- [&result, &val1](auto status, auto value) {
- result = status;
- if (status == EvsResult::OK) {
- val1 = value;
- }
- });
- if (val1 != 0) {
- pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0,
- [&result, &val1](auto status, auto effectiveValue) {
- result = status;
- val1 = effectiveValue;
- });
- ASSERT_EQ(EvsResult::OK, result);
- ASSERT_EQ(val1, 0);
- }
- }
-
- // Try to program a parameter with a random value [minVal, maxVal]
- int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
- int32_t val1 = 0;
-
- // Rounding down
- val0 = val0 - (val0 % step);
-
+ // Client 1 becomes the master
result = pCam1->setMaster();
ASSERT_EQ(EvsResult::OK, result);
+ std::vector<int32_t> values;
+ EvsEventDesc aTargetEvent = {};
+ EvsEventDesc aNotification = {};
+ bool listening = false;
+ std::mutex eventLock;
+ std::condition_variable eventCond;
+ if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
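+ // Before programming ABSOLUTE_FOCUS, turn AUTO_FOCUS off and confirm
+ // that client 0 observes the corresponding PARAMETER_CHANGED event.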
+ std::thread listener = std::thread(
+ [&frameHandler0, &aNotification, &listening, &eventCond] {
+ listening = true;
+ eventCond.notify_all();
+
+ EvsEventDesc aTargetEvent;
+ aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+ aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
+ aTargetEvent.payload[1] = 0;
+ if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
+ ALOGW("A timer is expired before a target event is fired.");
+ }
+ }
+ );
+
+ // Wait until the listener starts.
+ std::unique_lock<std::mutex> lock(eventLock);
+ auto timer = std::chrono::system_clock::now();
+ while (!listening) {
+ eventCond.wait_until(lock, timer + 1s);
+ }
+ lock.unlock();
+
+ // Try to turn off auto-focus
+ pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+ [&result, &values](auto status, auto effectiveValues) {
+ result = status;
+ if (status == EvsResult::OK) {
+ for (auto &&v : effectiveValues) {
+ values.push_back(v);
+ }
+ }
+ });
+ ASSERT_EQ(EvsResult::OK, result);
+ for (auto &&v : values) {
+ ASSERT_EQ(v, 0);
+ }
+
+ // Join a listener
+ if (listener.joinable()) {
+ listener.join();
+ }
+
+ // Make sure AUTO_FOCUS is off.
+ ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
+ EvsEventType::PARAMETER_CHANGED);
+ }
+
+ // Try to program a parameter with a random value [minVal, maxVal] after
+ // rounding it down.
+ int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
+ val0 = val0 - (val0 % step);
+
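+ // The same PARAMETER_CHANGED notification also reaches the client that
+ // programmed the parameter, so listen on client 1's own frame handler.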
+ std::thread listener = std::thread(
+ [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
+ listening = true;
+ eventCond.notify_all();
+
+ EvsEventDesc aTargetEvent;
+ aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+ aTargetEvent.payload[0] = static_cast<uint32_t>(cam1Cmds[0]);
+ aTargetEvent.payload[1] = val0;
+ if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
+ ALOGW("A timer is expired before a target event is fired.");
+ }
+ }
+ );
+
+ // Wait until the listener starts.
+ listening = false;
+ std::unique_lock<std::mutex> lock(eventLock);
+ auto timer = std::chrono::system_clock::now();
+ while (!listening) {
+ eventCond.wait_until(lock, timer + 1s);
+ }
+ lock.unlock();
+
+ values.clear();
pCam1->setIntParameter(cam1Cmds[0], val0,
- [&result, &val1](auto status, auto effectiveValue) {
+ [&result, &values](auto status, auto effectiveValues) {
result = status;
- val1 = effectiveValue;
+ if (status == EvsResult::OK) {
+ for (auto &&v : effectiveValues) {
+ values.push_back(v);
+ }
+ }
});
ASSERT_EQ(EvsResult::OK, result);
+ for (auto &&v : values) {
+ ASSERT_EQ(val0, v);
+ }
+
+ // Join a listener
+ if (listener.joinable()) {
+ listener.join();
+ }
// Verify a change notification
- EvsEvent aNotification = {};
- bool timeout =
- frameHandler0->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
- ASSERT_FALSE(timeout) << "Expected event does not arrive";
ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
EvsEventType::PARAMETER_CHANGED);
ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
cam1Cmds[0]);
- ASSERT_EQ(val1,
- static_cast<int32_t>(aNotification.payload[1]));
+ for (auto &&v : values) {
+ ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
+ }
+
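+ // Listen for the MASTER_RELEASED event that client 0's forceMaster()
+ // call below is expected to deliver to client 1.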
+ listener = std::thread(
+ [&frameHandler1, &aNotification, &listening, &eventCond] {
+ listening = true;
+ eventCond.notify_all();
+
+ EvsEventDesc aTargetEvent;
+ aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
+ if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
+ ALOGW("A timer is expired before a target event is fired.");
+ }
+ }
+ );
+
+ // Wait until the listener starts.
+ listening = false;
+ lock.lock();
+ timer = std::chrono::system_clock::now();
+ while (!listening) {
+ eventCond.wait_until(lock, timer + 1s);
+ }
+ lock.unlock();
// Client 0 steals a master role
ASSERT_EQ(EvsResult::OK, pCam0->forceMaster(pDisplay));
- frameHandler1->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+ // Join a listener
+ if (listener.joinable()) {
+ listener.join();
+ }
+
ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
EvsEventType::MASTER_RELEASED);
// Client 0 programs a parameter
val0 = minVal + (std::rand() % (maxVal - minVal));
- val1 = 0;
// Rounding down
val0 = val0 - (val0 % step);
if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
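+ // As before, AUTO_FOCUS is turned off before ABSOLUTE_FOCUS is
+ // programmed, and client 1 is expected to observe the change.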
+ std::thread listener = std::thread(
+ [&frameHandler1, &aNotification, &listening, &eventCond] {
+ listening = true;
+ eventCond.notify_all();
+
+ EvsEventDesc aTargetEvent;
+ aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+ aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
+ aTargetEvent.payload[1] = 0;
+ if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
+ ALOGW("A timer is expired before a target event is fired.");
+ }
+ }
+ );
+
+ // Wait until the listener starts.
+ std::unique_lock<std::mutex> lock(eventLock);
+ auto timer = std::chrono::system_clock::now();
+ while (!listening) {
+ eventCond.wait_until(lock, timer + 1s);
+ }
+ lock.unlock();
+
// Try to turn off auto-focus
- int32_t val1 = 0;
- pCam0->getIntParameter(CameraParam::AUTO_FOCUS,
- [&result, &val1](auto status, auto value) {
+ values.clear();
+ pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+ [&result, &values](auto status, auto effectiveValues) {
result = status;
if (status == EvsResult::OK) {
- val1 = value;
+ for (auto &&v : effectiveValues) {
+ values.push_back(v);
+ }
}
});
- if (val1 != 0) {
- pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0,
- [&result, &val1](auto status, auto effectiveValue) {
- result = status;
- val1 = effectiveValue;
- });
- ASSERT_EQ(EvsResult::OK, result);
- ASSERT_EQ(val1, 0);
+ ASSERT_EQ(EvsResult::OK, result);
+ for (auto &&v : values) {
+ ASSERT_EQ(v, 0);
}
+
+ // Join a listener
+ if (listener.joinable()) {
+ listener.join();
+ }
+
+ // Make sure AUTO_FOCUS is off.
+ ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
+ EvsEventType::PARAMETER_CHANGED);
}
+ listener = std::thread(
+ [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
+ listening = true;
+ eventCond.notify_all();
+
+ EvsEventDesc aTargetEvent;
+ aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+ aTargetEvent.payload[0] = static_cast<uint32_t>(cam0Cmds[0]);
+ aTargetEvent.payload[1] = val0;
+ if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
+ ALOGW("A timer is expired before a target event is fired.");
+ }
+ }
+ );
+
+ // Wait until the listener starts.
+ listening = false;
+ timer = std::chrono::system_clock::now();
+ lock.lock();
+ while (!listening) {
+ eventCond.wait_until(lock, timer + 1s);
+ }
+ lock.unlock();
+
+ values.clear();
pCam0->setIntParameter(cam0Cmds[0], val0,
- [&result, &val1](auto status, auto effectiveValue) {
+ [&result, &values](auto status, auto effectiveValues) {
result = status;
- val1 = effectiveValue;
+ if (status == EvsResult::OK) {
+ for (auto &&v : effectiveValues) {
+ values.push_back(v);
+ }
+ }
});
ASSERT_EQ(EvsResult::OK, result);
+ // Join a listener
+ if (listener.joinable()) {
+ listener.join();
+ }
// Verify a change notification
- timeout =
- frameHandler1->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
- ASSERT_FALSE(timeout) << "Expected event does not arrive";
ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
EvsEventType::PARAMETER_CHANGED);
ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
cam0Cmds[0]);
- ASSERT_EQ(val1,
- static_cast<int32_t>(aNotification.payload[1]));
+ for (auto &&v : values) {
+ ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
+ }
// Turn off the display (yes, before the stream stops -- it should be handled)
pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
@@ -1282,6 +1925,7 @@
// Test each reported camera
for (auto&& cam: cameraInfo) {
+ activeCameras.clear();
// choose a configuration that has a frame rate faster than minReqFps.
Stream targetCfg = {};
const int32_t minReqFps = 15;
@@ -1324,6 +1968,9 @@
.withDefault(nullptr);
ASSERT_NE(pCam, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam);
+
// Set up a frame receiver object which will fire up its own thread.
sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
pDisplay,
@@ -1383,6 +2030,7 @@
// Test each reported camera
for (auto&& cam: cameraInfo) {
+ activeCameras.clear();
// choose a configuration that has a frame rate faster than minReqFps.
Stream targetCfg = {};
const int32_t minReqFps = 15;
@@ -1427,6 +2075,9 @@
.withDefault(nullptr);
ASSERT_NE(pCam0, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam0);
+
// Try to create the second camera client with different stream
// configuration.
int32_t id = targetCfg.id;
@@ -1436,6 +2087,9 @@
.withDefault(nullptr);
ASSERT_EQ(pCam1, nullptr);
+ // Store a camera handle for a clean-up
+ activeCameras.push_back(pCam0);
+
// Try again with same stream configuration.
targetCfg.id = id;
pCam1 =
@@ -1506,6 +2160,30 @@
}
+/*
+ * LogicalCameraMetadata:
+ * Opens each logical camera reported by the enumerator and validates its
+ * metadata by checking its capability and locating the identifiers of the
+ * physical camera devices that back it.
+ */
+TEST_F(EvsHidlTest, LogicalCameraMetadata) {
+ ALOGI("Starting LogicalCameraMetadata test");
+
+ // Get the camera list
+ loadCameraList();
+
+ // Validate the metadata of each reported camera
+ for (auto&& cam: cameraInfo) {
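+ // getPhysicalCameraIds() inspects the camera metadata and reports
+ // whether this is a logical camera, together with the identifiers of
+ // its backing physical devices.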
+ bool isLogicalCam = false;
+ auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+ if (isLogicalCam) {
+ ASSERT_GE(devices.size(), 1) <<
+ "Logical camera device must have at least one physical camera device ID in its metadata.";
+ }
+ }
+}
+
+
int main(int argc, char** argv) {
::testing::AddGlobalTestEnvironment(EvsHidlEnvironment::Instance());
::testing::InitGoogleTest(&argc, argv);