Update IRadioResponse.hal documentation
am: 01d37ff8a1 -s ours
Change-Id: If0c2447979fb8bd6b54c3cc2063b96e608fb69b8
diff --git a/audio/2.0/config/audio_policy_configuration.xsd b/audio/2.0/config/audio_policy_configuration.xsd
index 48b9a9b..7647cad 100644
--- a/audio/2.0/config/audio_policy_configuration.xsd
+++ b/audio/2.0/config/audio_policy_configuration.xsd
@@ -49,10 +49,6 @@
<xs:selector xpath="modules/module"/>
<xs:field xpath="@name"/>
</xs:key>
- <xs:key name="devicePortNameGlobalKey">
- <xs:selector xpath="modules/module/devicePorts/devicePort"/>
- <xs:field xpath="@tagName"/>
- </xs:key>
<xs:unique name="volumeTargetUniqueness">
<xs:selector xpath="volumes/volume"/>
<xs:field xpath="@stream"/>
@@ -73,14 +69,28 @@
<!-- Enum values of IDevicesFactory::Device
TODO: generate from hidl to avoid manual sync. -->
<xs:simpleType name="halName">
- <xs:restriction base="xs:string">
- <xs:enumeration value="primary"/>
- <xs:enumeration value="a2dp"/>
- <xs:enumeration value="usb"/>
- <xs:enumeration value="r_submix"/>
- <xs:enumeration value="codec_offload"/>
- <xs:enumeration value="stub"/>
- </xs:restriction>
+ <xs:union>
+ <xs:simpleType>
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="primary"/>
+ <xs:enumeration value="a2dp"/>
+ <xs:enumeration value="usb"/>
+ <xs:enumeration value="r_submix"/>
+ <xs:enumeration value="codec_offload"/>
+ <xs:enumeration value="stub"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType>
+ <!-- Vendor eXtension names must be in the vx namespace.
+ Vendors are encouraged to namespace their module names.
+ Example for a hypothetical Google virtual reality HAL:
+ <module name="vx_google_vr" halVersion="3.0">
+ -->
+ <xs:restriction base="xs:string">
+ <xs:pattern value="vx_[_a-zA-Z0-9]+"/>
+ </xs:restriction>
+ </xs:simpleType>
+ </xs:union>
</xs:simpleType>
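The union above accepts either one of the six standard IDevicesFactory names or any vendor extension matching vx_[_a-zA-Z0-9]+. As a rough host-side illustration of the same acceptance rule (the helper below is hypothetical and is not part of the schema or the HAL), a C++ sketch:

    #include <regex>
    #include <string>

    // Hypothetical helper mirroring the halName union: a module name is valid
    // if it is one of the standard IDevicesFactory names or a vx_ extension.
    static bool isValidHalName(const std::string& name) {
        static const char* kStandardNames[] = {
            "primary", "a2dp", "usb", "r_submix", "codec_offload", "stub"};
        for (const char* standard : kStandardNames) {
            if (name == standard) return true;
        }
        static const std::regex kVendorExtension("vx_[_a-zA-Z0-9]+");
        return std::regex_match(name, kVendorExtension);
    }

Under this rule, the "vx_google_vr" name from the comment above is accepted, while an un-namespaced vendor name such as "google_vr" is rejected.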
<xs:complexType name="modules">
<xs:annotation>
@@ -127,13 +137,15 @@
<xs:selector xpath="mixPorts/mixPort"/>
<xs:field xpath="@name"/>
</xs:unique>
- <!-- Although this key constraint is redundant with devicePortNameGlobalKey,
- the set is used to constraint defaultOutputDevice and attachedDevice
- to reference a devicePort of the same module. -->
<xs:key name="devicePortNameKey">
<xs:selector xpath="devicePorts/devicePort"/>
<xs:field xpath="@tagName"/>
</xs:key>
+ <xs:unique name="devicePortUniqueness">
+ <xs:selector xpath="devicePorts/devicePort"/>
+ <xs:field xpath="@type"/>
+ <xs:field xpath="@address"/>
+ </xs:unique>
<xs:keyref name="defaultOutputDeviceRef" refer="devicePortNameKey">
<xs:selector xpath="defaultOutputDevice"/>
<xs:field xpath="."/>
@@ -188,6 +200,7 @@
<xs:complexType>
<xs:sequence>
<xs:element name="profile" type="profile" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element name="gains" type="gains" minOccurs="0"/>
</xs:sequence>
<xs:attribute name="name" type="xs:token" use="required"/>
<xs:attribute name="role" type="role" use="required"/>
@@ -199,6 +212,10 @@
<xs:field xpath="samplingRate"/>
<xs:field xpath="channelMasks"/>
</xs:unique>
+ <xs:unique name="mixPortGainUniqueness">
+ <xs:selector xpath="gains/gain"/>
+ <xs:field xpath="@name"/>
+ </xs:unique>
</xs:element>
</xs:sequence>
</xs:complexType>
@@ -360,10 +377,34 @@
</xs:restriction>
</xs:simpleType>
<xs:complexType name="profile">
- <xs:attribute name="name" type="xs:token" use="required"/>
- <xs:attribute name="format" type="audioFormat" use="required"/>
- <xs:attribute name="samplingRates" type="samplingRates" use="required"/>
- <xs:attribute name="channelMasks" type="channelMask" use="required"/>
+ <xs:attribute name="name" type="xs:token" use="optional"/>
+ <xs:attribute name="format" type="audioFormat" use="optional"/>
+ <xs:attribute name="samplingRates" type="samplingRates" use="optional"/>
+ <xs:attribute name="channelMasks" type="channelMask" use="optional"/>
+ </xs:complexType>
+ <xs:simpleType name="gainMode">
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="AUDIO_GAIN_MODE_JOINT"/>
+ <xs:enumeration value="AUDIO_GAIN_MODE_CHANNELS"/>
+ <xs:enumeration value="AUDIO_GAIN_MODE_RAMP"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:complexType name="gains">
+ <xs:sequence>
+ <xs:element name="gain" minOccurs="0" maxOccurs="unbounded">
+ <xs:complexType>
+ <xs:attribute name="name" type="xs:token" use="required"/>
+ <xs:attribute name="mode" type="gainMode" use="required"/>
+ <xs:attribute name="channel_mask" type="channelMask" use="optional"/>
+ <xs:attribute name="minValueMB" type="xs:int" use="optional"/>
+ <xs:attribute name="maxValueMB" type="xs:int" use="optional"/>
+ <xs:attribute name="defaultValueMB" type="xs:int" use="optional"/>
+ <xs:attribute name="stepValueMB" type="xs:int" use="optional"/>
+ <xs:attribute name="minRampMs" type="xs:int" use="optional"/>
+ <xs:attribute name="maxRampMs" type="xs:int" use="optional"/>
+ </xs:complexType>
+ </xs:element>
+ </xs:sequence>
</xs:complexType>
<xs:complexType name="devicePorts">
<xs:sequence>
@@ -371,10 +412,12 @@
<xs:complexType>
<xs:sequence>
<xs:element name="profile" type="profile" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element name="gains" type="gains" minOccurs="0"/>
</xs:sequence>
<xs:attribute name="tagName" type="xs:token" use="required"/>
<xs:attribute name="type" type="audioDevice" use="required"/>
<xs:attribute name="role" type="role" use="required"/>
+ <xs:attribute name="address" type="xs:string" use="optional" default=""/>
</xs:complexType>
<xs:unique name="devicePortProfileUniqueness">
<xs:selector xpath="profile"/>
@@ -382,6 +425,10 @@
<xs:field xpath="samplingRate"/>
<xs:field xpath="channelMasks"/>
</xs:unique>
+ <xs:unique name="devicePortGainUniqueness">
+ <xs:selector xpath="gains/gain"/>
+ <xs:field xpath="@name"/>
+ </xs:unique>
</xs:element>
</xs:sequence>
</xs:complexType>
diff --git a/audio/2.0/vts/functional/AudioPrimaryHidlHalTest.cpp b/audio/2.0/vts/functional/AudioPrimaryHidlHalTest.cpp
index 3ee44f3..a5b37af 100644
--- a/audio/2.0/vts/functional/AudioPrimaryHidlHalTest.cpp
+++ b/audio/2.0/vts/functional/AudioPrimaryHidlHalTest.cpp
@@ -768,11 +768,12 @@
ASSERT_GE(extract(stream->getBufferSize()),
extract(stream->getFrameSize())));
-template <class Property, class CapabilityGetter, class Getter, class Setter>
+template <class Property, class CapabilityGetter>
static void testCapabilityGetter(const string& name, IStream* stream,
- Property currentValue,
CapabilityGetter capablityGetter,
- Getter getter, Setter setter) {
+ Return<Property> (IStream::*getter)(),
+ Return<Result> (IStream::*setter)(Property),
+ bool currentMustBeSupported = true) {
hidl_vec<Property> capabilities;
ASSERT_OK((stream->*capablityGetter)(returnIn(capabilities)));
if (capabilities.size() == 0) {
@@ -783,16 +784,24 @@
doc::partialTest(name + " is not supported");
return;
};
- // TODO: This code has never been tested on a hal that supports
- // getSupportedSampleRates
- EXPECT_NE(std::find(capabilities.begin(), capabilities.end(), currentValue),
- capabilities.end())
- << "current " << name << " is not in the list of the supported ones "
- << toString(capabilities);
+
+ if (currentMustBeSupported) {
+ Property currentValue = extract((stream->*getter)());
+ EXPECT_NE(std::find(capabilities.begin(), capabilities.end(), currentValue),
+ capabilities.end())
+ << "current " << name << " is not in the list of the supported ones "
+ << toString(capabilities);
+ }
// Check that all declared supported values are indeed supported
for (auto capability : capabilities) {
- ASSERT_OK((stream->*setter)(capability));
+ auto ret = (stream->*setter)(capability);
+ ASSERT_TRUE(ret.isOk());
+ if (ret == Result::NOT_SUPPORTED) {
+ doc::partialTest("Setter is not supported");
+ return;
+ }
+ ASSERT_OK(ret);
ASSERT_EQ(capability, extract((stream->*getter)()));
}
}
@@ -800,15 +809,17 @@
TEST_IO_STREAM(SupportedSampleRate,
"Check that the stream sample rate is declared as supported",
testCapabilityGetter("getSupportedSampleRate", stream.get(),
- extract(stream->getSampleRate()),
&IStream::getSupportedSampleRates,
&IStream::getSampleRate,
- &IStream::setSampleRate))
+ &IStream::setSampleRate,
+ // getSupportedSampleRate returns the native sampling rates
+ // (those that can be played without resampling), but the HAL
+ // may also support other sampling rates.
+ false))
TEST_IO_STREAM(SupportedChannelMask,
"Check that the stream channel mask is declared as supported",
testCapabilityGetter("getSupportedChannelMask", stream.get(),
- extract(stream->getChannelMask()),
&IStream::getSupportedChannelMasks,
&IStream::getChannelMask,
&IStream::setChannelMask))
@@ -816,7 +827,6 @@
TEST_IO_STREAM(SupportedFormat,
"Check that the stream format is declared as supported",
testCapabilityGetter("getSupportedFormat", stream.get(),
- extract(stream->getFormat()),
&IStream::getSupportedFormats,
&IStream::getFormat, &IStream::setFormat))
diff --git a/audio/2.0/vts/functional/ValidateAudioConfiguration.cpp b/audio/2.0/vts/functional/ValidateAudioConfiguration.cpp
index 01324c8..ec3259a 100644
--- a/audio/2.0/vts/functional/ValidateAudioConfiguration.cpp
+++ b/audio/2.0/vts/functional/ValidateAudioConfiguration.cpp
@@ -14,9 +14,21 @@
* limitations under the License.
*/
+#include <string>
+#include <unistd.h>
+
#include "utility/ValidateXml.h"
TEST(CheckConfig, audioPolicyConfigurationValidation) {
- ASSERT_VALID_XML("/vendor/etc/audio_policy_configuration.xml",
- "/data/local/tmp/audio_policy_configuration.xsd");
+ const char* configName = "audio_policy_configuration.xml";
+ const char* possibleConfigLocations[] = {"/odm/etc", "/vendor/etc", "/system/etc"};
+ const char* configSchemaPath = "/data/local/tmp/audio_policy_configuration.xsd";
+
+ for (std::string folder : possibleConfigLocations) {
+ const auto configPath = folder + '/' + configName;
+ if (access(configPath.c_str(), R_OK) == 0) {
+ ASSERT_VALID_XML(configPath.c_str(), configSchemaPath);
+ return; // The framework does not read past the first config file found
+ }
+ }
}
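ASSERT_VALID_XML comes from utility/ValidateXml.h, included above. As an assumption about the kind of check such a macro wraps (not the actual utility code), a libxml2-based validation could look like this sketch:

    #include <libxml/xmlschemas.h>

    // Rough sketch: returns true if the XML file at xmlPath validates against
    // the XML Schema at xsdPath. Error reporting is omitted for brevity.
    static bool validateAgainstSchema(const char* xmlPath, const char* xsdPath) {
        xmlSchemaParserCtxtPtr parserCtxt = xmlSchemaNewParserCtxt(xsdPath);
        if (parserCtxt == nullptr) return false;
        xmlSchemaPtr schema = xmlSchemaParse(parserCtxt);
        xmlSchemaFreeParserCtxt(parserCtxt);
        if (schema == nullptr) return false;
        xmlSchemaValidCtxtPtr validCtxt = xmlSchemaNewValidCtxt(schema);
        bool valid = false;
        if (validCtxt != nullptr) {
            valid = (xmlSchemaValidateFile(validCtxt, xmlPath, 0) == 0);
            xmlSchemaFreeValidCtxt(validCtxt);
        }
        xmlSchemaFree(schema);
        return valid;
    }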
diff --git a/bluetooth/1.0/vts/functional/VtsHalBluetoothV1_0TargetTest.cpp b/bluetooth/1.0/vts/functional/VtsHalBluetoothV1_0TargetTest.cpp
index d699f21..fb53366 100644
--- a/bluetooth/1.0/vts/functional/VtsHalBluetoothV1_0TargetTest.cpp
+++ b/bluetooth/1.0/vts/functional/VtsHalBluetoothV1_0TargetTest.cpp
@@ -88,8 +88,14 @@
#define EVENT_NUMBER_OF_COMPLETED_PACKETS_NUM_HANDLES 2
-#define ACL_BROADCAST_ACTIVE_SLAVE (0x1 << 4)
-#define ACL_PACKET_BOUNDARY_COMPLETE (0x3 << 6)
+#define ACL_BROADCAST_FLAG_OFFSET 6
+#define ACL_BROADCAST_FLAG_ACTIVE_SLAVE 0x1
+#define ACL_BROADCAST_ACTIVE_SLAVE (ACL_BROADCAST_FLAG_ACTIVE_SLAVE << ACL_BROADCAST_FLAG_OFFSET)
+
+#define ACL_PACKET_BOUNDARY_FLAG_OFFSET 4
+#define ACL_PACKET_BOUNDARY_FLAG_COMPLETE 0x3
+#define ACL_PACKET_BOUNDARY_COMPLETE \
+ (ACL_PACKET_BOUNDARY_FLAG_COMPLETE << ACL_PACKET_BOUNDARY_FLAG_OFFSET)
constexpr char kCallbackNameAclEventReceived[] = "aclDataReceived";
constexpr char kCallbackNameHciEventReceived[] = "hciEventReceived";
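The reworked macros spell out the bit layout: within the flags byte of an HCI ACL data packet header, the packet-boundary flag occupies bits 4-5 and the broadcast flag bits 6-7. A minimal sketch of how they combine with a connection handle when building the two-byte header prefix, assuming the standard HCI ACL layout; the helper and the handle value are illustrative only:

    #include <cstdint>
    #include <vector>

    // Illustrative helper: first byte is the low handle byte, second byte is
    // the high handle nibble OR'd with the boundary and broadcast flags.
    static std::vector<uint8_t> aclHeaderPrefix(uint16_t handle) {
        std::vector<uint8_t> header;
        header.push_back(static_cast<uint8_t>(handle & 0xff));
        header.push_back(static_cast<uint8_t>((handle >> 8) & 0x0f) |
                         ACL_PACKET_BOUNDARY_COMPLETE | ACL_BROADCAST_ACTIVE_SLAVE);
        return header;
    }

For a handle of 0x002a this yields the bytes 0x2a, 0x70: 0x30 from the complete packet-boundary flag plus 0x40 from the active-slave broadcast flag.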
diff --git a/broadcastradio/1.0/vts/functional/VtsHalBroadcastradioV1_0TargetTest.cpp b/broadcastradio/1.0/vts/functional/VtsHalBroadcastradioV1_0TargetTest.cpp
index ebeadb1..fd048db 100644
--- a/broadcastradio/1.0/vts/functional/VtsHalBroadcastradioV1_0TargetTest.cpp
+++ b/broadcastradio/1.0/vts/functional/VtsHalBroadcastradioV1_0TargetTest.cpp
@@ -46,7 +46,8 @@
using ::android::hardware::broadcastradio::V1_0::Direction;
using ::android::hardware::broadcastradio::V1_0::ProgramInfo;
using ::android::hardware::broadcastradio::V1_0::MetaData;
-
+using ::android::hardware::broadcastradio::V1_0::MetadataKey;
+using ::android::hardware::broadcastradio::V1_0::MetadataType;
#define RETURN_IF_SKIPPED \
if (skipped) { \
@@ -229,6 +230,18 @@
bool openTuner();
bool checkAntenna();
+ /**
+ * Retrieves AM/FM band configuration from module properties.
+ *
+ * The configuration may not exist if the radio type is other than AM/FM
+ * or the provided index is out of bounds.
+ * In such a case, an empty configuration is returned.
+ *
+ * @param idx Band index to retrieve.
+ * @return Band configuration reference.
+ */
+ const BandConfig& getBand(unsigned idx);
+
static const nsecs_t kConnectCallbacktimeoutNs = seconds_to_nanoseconds(1);
static const nsecs_t kConfigCallbacktimeoutNs = seconds_to_nanoseconds(10);
static const nsecs_t kTuneCallbacktimeoutNs = seconds_to_nanoseconds(30);
@@ -237,6 +250,7 @@
bool skipped;
sp<IBroadcastRadio> mRadio;
Properties mHalProperties;
+ bool mHalPropertiesInitialized = false;
sp<ITuner> mTuner;
sp<MyCallback> mTunerCallback;
Mutex mLock;
@@ -280,23 +294,29 @@
bool BroadcastRadioHidlTest::getProperties()
{
- if (mHalProperties.bands.size() == 0) {
- Result halResult = Result::NOT_INITIALIZED;
- Return<void> hidlReturn =
- mRadio->getProperties([&](Result result, const Properties& properties) {
- halResult = result;
- if (result == Result::OK) {
- mHalProperties = properties;
- }
- });
+ if (mHalPropertiesInitialized) return true;
- EXPECT_TRUE(hidlReturn.isOk());
- EXPECT_EQ(Result::OK, halResult);
- EXPECT_EQ(Class::AM_FM, mHalProperties.classId);
- EXPECT_GT(mHalProperties.numTuners, 0u);
+ Result halResult = Result::NOT_INITIALIZED;
+ auto hidlReturn = mRadio->getProperties([&](Result result, const Properties& properties) {
+ halResult = result;
+ if (result == Result::OK) {
+ mHalProperties = properties;
+ }
+ });
+
+ EXPECT_TRUE(hidlReturn.isOk());
+ EXPECT_EQ(Result::OK, halResult);
+ EXPECT_EQ(radioClass, mHalProperties.classId);
+ EXPECT_GT(mHalProperties.numTuners, 0u);
+ if (radioClass == Class::AM_FM) {
EXPECT_GT(mHalProperties.bands.size(), 0u);
}
- return mHalProperties.bands.size() > 0;
+
+ if (hidlReturn.isOk() && halResult == Result::OK) {
+ mHalPropertiesInitialized = true;
+ return true;
+ }
+ return false;
}
bool BroadcastRadioHidlTest::openTuner()
@@ -306,17 +326,18 @@
}
if (mTuner.get() == nullptr) {
Result halResult = Result::NOT_INITIALIZED;
- Return<void> hidlReturn =
- mRadio->openTuner(mHalProperties.bands[0], true, mTunerCallback,
- [&](Result result, const sp<ITuner>& tuner) {
- halResult = result;
- if (result == Result::OK) {
- mTuner = tuner;
- }
- });
+ auto openCb = [&](Result result, const sp<ITuner>& tuner) {
+ halResult = result;
+ if (result == Result::OK) {
+ mTuner = tuner;
+ }
+ };
+ auto hidlReturn = mRadio->openTuner(getBand(0), true, mTunerCallback, openCb);
EXPECT_TRUE(hidlReturn.isOk());
EXPECT_EQ(Result::OK, halResult);
- EXPECT_EQ(true, waitForCallback(kConfigCallbacktimeoutNs));
+ if (radioClass == Class::AM_FM) {
+ EXPECT_EQ(true, waitForCallback(kConfigCallbacktimeoutNs));
+ }
}
EXPECT_NE(nullptr, mTuner.get());
return nullptr != mTuner.get();
@@ -324,6 +345,8 @@
bool BroadcastRadioHidlTest::checkAntenna()
{
+ if (radioClass != Class::AM_FM) return true;
+
BandConfig halConfig;
Result halResult = Result::NOT_INITIALIZED;
Return<void> hidlReturn =
@@ -337,6 +360,19 @@
return ((halResult == Result::OK) && (halConfig.antennaConnected == true));
}
+const BandConfig& BroadcastRadioHidlTest::getBand(unsigned idx) {
+ static BandConfig dummyBandConfig = {};
+ if (radioClass == Class::AM_FM) {
+ EXPECT_GT(mHalProperties.bands.size(), idx);
+ if (mHalProperties.bands.size() > idx) {
+ return mHalProperties.bands[idx];
+ } else {
+ return dummyBandConfig;
+ }
+ } else {
+ return dummyBandConfig;
+ }
+}
/**
* Test IBroadcastRadio::getProperties() method
@@ -344,7 +380,7 @@
* Verifies that:
* - the HAL implements the method
* - the method returns 0 (no error)
- * - the implementation class is AM_FM
+ * - the implementation class is radioClass
* - the implementation supports at least one tuner
* - the implementation supports at least one band
*/
@@ -383,22 +419,24 @@
* Test IBroadcastRadio::openTuner() method called twice.
*
* Verifies that:
- * - the openTuner method fails when called for the second time without deleting previous
- * ITuner instance
+ * - the openTuner method fails with INVALID_STATE or succeeds when called for the second time
+ * without deleting the previous ITuner instance
*/
TEST_P(BroadcastRadioHidlTest, OpenTunerTwice) {
RETURN_IF_SKIPPED;
EXPECT_TRUE(openTuner());
Result halResult = Result::NOT_INITIALIZED;
- Return<void> hidlReturn =
- mRadio->openTuner(mHalProperties.bands[0], true, mTunerCallback,
- [&](Result result, const sp<ITuner>&) {
- halResult = result;
- });
+ auto openCb = [&](Result result, const sp<ITuner>&) { halResult = result; };
+ auto hidlReturn = mRadio->openTuner(getBand(0), true, mTunerCallback, openCb);
EXPECT_TRUE(hidlReturn.isOk());
- EXPECT_EQ(Result::INVALID_STATE, halResult);
- EXPECT_TRUE(waitForCallback(kConfigCallbacktimeoutNs));
+ if (halResult == Result::OK) {
+ if (radioClass == Class::AM_FM) {
+ EXPECT_TRUE(waitForCallback(kConfigCallbacktimeoutNs));
+ }
+ } else {
+ EXPECT_EQ(Result::INVALID_STATE, halResult);
+ }
}
/**
@@ -409,18 +447,22 @@
* - the methods return 0 (no error)
* - the configuration callback is received within kConfigCallbacktimeoutNs ns
* - the configuration read back from HAL has the same class Id
+ *
+ * Skipped for radio classes other than AM/FM, because setConfiguration
+ * applies only to these bands.
*/
TEST_P(BroadcastRadioHidlTest, SetAndGetConfiguration) {
+ if (radioClass != Class::AM_FM) skipped = true;
RETURN_IF_SKIPPED;
ASSERT_EQ(true, openTuner());
// test setConfiguration
mCallbackCalled = false;
- Return<Result> hidlResult = mTuner->setConfiguration(mHalProperties.bands[1]);
+ Return<Result> hidlResult = mTuner->setConfiguration(getBand(1));
EXPECT_TRUE(hidlResult.isOk());
EXPECT_EQ(Result::OK, hidlResult);
EXPECT_EQ(true, waitForCallback(kConfigCallbacktimeoutNs));
EXPECT_EQ(Result::OK, mResultCallbackData);
- EXPECT_EQ(mHalProperties.bands[1], mBandConfigCallbackData);
+ EXPECT_EQ(getBand(1), mBandConfigCallbackData);
// test getConfiguration
BandConfig halConfig;
@@ -434,7 +476,7 @@
});
EXPECT_TRUE(hidlReturn.isOk());
EXPECT_EQ(Result::OK, halResult);
- EXPECT_EQ(mHalProperties.bands[1], halConfig);
+ EXPECT_EQ(getBand(1), halConfig);
}
/**
@@ -443,8 +485,12 @@
* Verifies that:
* - the methods returns INVALID_ARGUMENTS on invalid arguments
* - the method recovers and succeeds after passing correct arguments
+ *
+ * Skipped for radio classes other than AM/FM, because setConfiguration
+ * applies only to these bands.
*/
TEST_P(BroadcastRadioHidlTest, SetConfigurationFails) {
+ if (radioClass != Class::AM_FM) skipped = true;
RETURN_IF_SKIPPED;
ASSERT_EQ(true, openTuner());
@@ -463,7 +509,7 @@
// Test setConfiguration recovering after passing good data.
mCallbackCalled = false;
- setResult = mTuner->setConfiguration(mHalProperties.bands[0]);
+ setResult = mTuner->setConfiguration(getBand(0));
EXPECT_TRUE(setResult.isOk());
EXPECT_EQ(Result::OK, setResult);
EXPECT_EQ(true, waitForCallback(kConfigCallbacktimeoutNs));
@@ -506,8 +552,12 @@
* - the method returns 0 (no error)
* - the tuned callback is received within kTuneCallbacktimeoutNs ns
* - skipping sub-channel or not does not fail the call
+ *
+ * Skipped for radio classes other than AM/FM, because step is not possible
+ * on DAB or satellite.
*/
TEST_P(BroadcastRadioHidlTest, Step) {
+ if (radioClass != Class::AM_FM) skipped = true;
RETURN_IF_SKIPPED;
ASSERT_EQ(true, openTuner());
ASSERT_TRUE(checkAntenna());
@@ -533,20 +583,26 @@
* - the HAL implements the methods
* - the methods return 0 (no error)
* - the tuned callback is received within kTuneCallbacktimeoutNs ns after tune()
+ *
+ * Skipped for radio classes other than AM/FM, because tuning to a frequency
+ * is not possible on DAB or satellite.
*/
TEST_P(BroadcastRadioHidlTest, TuneAndGetProgramInformationAndCancel) {
+ if (radioClass != Class::AM_FM) skipped = true;
RETURN_IF_SKIPPED;
ASSERT_EQ(true, openTuner());
ASSERT_TRUE(checkAntenna());
+ auto& band = getBand(0);
+
// test tune
- ASSERT_GT(mHalProperties.bands[0].spacings.size(), 0u);
- ASSERT_GT(mHalProperties.bands[0].upperLimit, mHalProperties.bands[0].lowerLimit);
+ ASSERT_GT(band.spacings.size(), 0u);
+ ASSERT_GT(band.upperLimit, band.lowerLimit);
// test scan UP
- uint32_t lowerLimit = mHalProperties.bands[0].lowerLimit;
- uint32_t upperLimit = mHalProperties.bands[0].upperLimit;
- uint32_t spacing = mHalProperties.bands[0].spacings[0];
+ uint32_t lowerLimit = band.lowerLimit;
+ uint32_t upperLimit = band.upperLimit;
+ uint32_t spacing = band.spacings[0];
uint32_t channel =
lowerLimit + (((upperLimit - lowerLimit) / 2 + spacing - 1) / spacing) * spacing;
@@ -571,11 +627,8 @@
EXPECT_TRUE(hidlReturn.isOk());
EXPECT_EQ(Result::OK, halResult);
if (mResultCallbackData == Result::OK) {
- EXPECT_EQ(true, halInfo.tuned);
EXPECT_LE(halInfo.channel, upperLimit);
EXPECT_GE(halInfo.channel, lowerLimit);
- } else {
- EXPECT_EQ(false, halInfo.tuned);
}
// test cancel
@@ -591,8 +644,12 @@
* Verifies that:
* - the method returns INVALID_ARGUMENTS when applicable
* - the method recovers and succeeds after passing correct arguments
+ *
+ * Skipped for radio classes other than AM/FM, because tuning to a frequency
+ * is not possible on DAB or satellite.
*/
TEST_P(BroadcastRadioHidlTest, TuneFailsOutOfBounds) {
+ if (radioClass != Class::AM_FM) skipped = true;
RETURN_IF_SKIPPED;
ASSERT_TRUE(openTuner());
ASSERT_TRUE(checkAntenna());
@@ -622,6 +679,52 @@
EXPECT_TRUE(waitForCallback(kTuneCallbacktimeoutNs));
}
+/**
+ * Test proper image format in metadata.
+ *
+ * Verifies that:
+ * - all images in metadata are provided in-band (as a binary blob, not by id)
+ *
+ * This is a counter-test for OobImagesOnly from 1.1 VTS.
+ */
+TEST_P(BroadcastRadioHidlTest, IbImagesOnly) {
+ RETURN_IF_SKIPPED;
+ ASSERT_TRUE(openTuner());
+ ASSERT_TRUE(checkAntenna());
+
+ bool firstScan = true;
+ uint32_t firstChannel, prevChannel;
+ while (true) {
+ mCallbackCalled = false;
+ auto hidlResult = mTuner->scan(Direction::UP, true);
+ ASSERT_TRUE(hidlResult.isOk());
+ if (hidlResult == Result::TIMEOUT) {
+ ALOGI("Got timeout on scan operation");
+ break;
+ }
+ ASSERT_EQ(Result::OK, hidlResult);
+ ASSERT_EQ(true, waitForCallback(kTuneCallbacktimeoutNs));
+
+ if (firstScan) {
+ firstScan = false;
+ firstChannel = mProgramInfoCallbackData.channel;
+ } else {
+ // scanned the whole band
+ if (mProgramInfoCallbackData.channel >= firstChannel && prevChannel <= firstChannel) {
+ break;
+ }
+ }
+ prevChannel = mProgramInfoCallbackData.channel;
+
+ for (auto&& entry : mProgramInfoCallbackData.metadata) {
+ if (entry.key != MetadataKey::ICON && entry.key != MetadataKey::ART) continue;
+ EXPECT_EQ(MetadataType::RAW, entry.type);
+ EXPECT_EQ(0, entry.intValue);
+ EXPECT_GT(entry.rawValue.size(), 0u);
+ }
+ }
+}
+
INSTANTIATE_TEST_CASE_P(
BroadcastRadioHidlTestCases,
BroadcastRadioHidlTest,
diff --git a/camera/provider/2.4/vts/functional/Android.bp b/camera/provider/2.4/vts/functional/Android.bp
index 85312c1..14d7c50 100644
--- a/camera/provider/2.4/vts/functional/Android.bp
+++ b/camera/provider/2.4/vts/functional/Android.bp
@@ -31,9 +31,14 @@
"libcamera_metadata",
"libbinder",
"libgui",
- "libui"
+ "libui",
+ "libfmq",
],
- static_libs: ["VtsHalHidlTargetTestBase", "libgrallocusage"],
+ static_libs: [
+ "VtsHalHidlTargetTestBase",
+ "libgrallocusage",
+ "android.hardware.camera.common@1.0-helper",
+ ],
cflags: [
"-O0",
"-g",
diff --git a/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp b/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp
index c350e31..2673afd 100644
--- a/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp
+++ b/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp
@@ -15,29 +15,34 @@
*/
#define LOG_TAG "camera_hidl_hal_test"
-#include <VtsHalHidlTargetTestBase.h>
+
+#include <chrono>
+#include <mutex>
+#include <regex>
+#include <unordered_map>
+#include <condition_variable>
+
+#include <inttypes.h>
+
#include <android/hardware/camera/device/1.0/ICameraDevice.h>
#include <android/hardware/camera/device/3.2/ICameraDevice.h>
#include <android/hardware/camera/provider/2.4/ICameraProvider.h>
-#include <android/log.h>
+#include <android/hidl/manager/1.0/IServiceManager.h>
#include <binder/MemoryHeapBase.h>
+#include <CameraMetadata.h>
+#include <CameraParameters.h>
+#include <fmq/MessageQueue.h>
#include <grallocusage/GrallocUsageConversion.h>
#include <gui/BufferItemConsumer.h>
#include <gui/BufferQueue.h>
#include <gui/Surface.h>
#include <hardware/gralloc.h>
#include <hardware/gralloc1.h>
-#include <inttypes.h>
#include <system/camera.h>
+#include <system/camera_metadata.h>
#include <ui/GraphicBuffer.h>
-#include <utils/Errors.h>
-#include <chrono>
-#include <condition_variable>
-#include <mutex>
-#include <regex>
-#include <unordered_map>
-#include "CameraParameters.h"
-#include "system/camera_metadata.h"
+
+#include <VtsHalHidlTargetTestBase.h>
using ::android::hardware::Return;
using ::android::hardware::Void;
@@ -89,8 +94,13 @@
using ::android::hardware::camera::device::V1_0::ICameraDevicePreviewCallback;
using ::android::hardware::camera::device::V1_0::FrameCallbackFlag;
using ::android::hardware::camera::device::V1_0::HandleTimestampMessage;
+using ::android::hardware::MessageQueue;
+using ::android::hardware::kSynchronizedReadWrite;
+using ResultMetadataQueue = MessageQueue<uint8_t, kSynchronizedReadWrite>;
+using ::android::hidl::manager::V1_0::IServiceManager;
const char kCameraPassthroughServiceName[] = "legacy/0";
+const char *kProviderFQName = "android.hardware.camera.provider@2.4::ICameraProvider";
const uint32_t kMaxPreviewWidth = 1920;
const uint32_t kMaxPreviewHeight = 1080;
const uint32_t kMaxVideoWidth = 4096;
@@ -114,25 +124,41 @@
namespace {
// "device@<version>/legacy/<id>"
- const char *kDeviceNameRE = "device@([0-9]+\\.[0-9]+)/legacy/(.+)";
+ const char *kDeviceNameRE = "device@([0-9]+\\.[0-9]+)/%s/(.+)";
const int CAMERA_DEVICE_API_VERSION_3_2 = 0x302;
const int CAMERA_DEVICE_API_VERSION_1_0 = 0x100;
const char *kHAL3_2 = "3.2";
const char *kHAL1_0 = "1.0";
- bool matchDeviceName(const hidl_string& deviceName, std::smatch& sm) {
- std::regex e(kDeviceNameRE);
+ bool matchDeviceName(const hidl_string& deviceName,
+ const hidl_string &providerType,
+ std::string* deviceVersion,
+ std::string* cameraId) {
+ ::android::String8 pattern;
+ pattern.appendFormat(kDeviceNameRE, providerType.c_str());
+ std::regex e(pattern.string());
std::string deviceNameStd(deviceName.c_str());
- return std::regex_match(deviceNameStd, sm, e);
+ std::smatch sm;
+ if (std::regex_match(deviceNameStd, sm, e)) {
+ if (deviceVersion != nullptr) {
+ *deviceVersion = sm[1];
+ }
+ if (cameraId != nullptr) {
+ *cameraId = sm[2];
+ }
+ return true;
+ }
+ return false;
}
- int getCameraDeviceVersion(const hidl_string& deviceName) {
- std::smatch sm;
- bool match = matchDeviceName(deviceName, sm);
+ int getCameraDeviceVersion(const hidl_string& deviceName,
+ const hidl_string &providerType) {
+ std::string version;
+ bool match = matchDeviceName(deviceName, providerType, &version, nullptr);
if (!match) {
return -1;
}
- std::string version = sm[1].str();
+
if (version.compare(kHAL3_2) == 0) {
// maybe switched to 3.4 or define the hidl version enum later
return CAMERA_DEVICE_API_VERSION_3_2;
@@ -142,6 +168,45 @@
return 0;
}
+ bool parseProviderName(const std::string& name, std::string *type /*out*/,
+ uint32_t *id /*out*/) {
+ if (!type || !id) {
+ ADD_FAILURE();
+ return false;
+ }
+
+ std::string::size_type slashIdx = name.find('/');
+ if (slashIdx == std::string::npos || slashIdx == name.size() - 1) {
+ ADD_FAILURE() << "Provider name does not have / separator between type"
+ "and id";
+ return false;
+ }
+
+ std::string typeVal = name.substr(0, slashIdx);
+
+ char *endPtr;
+ errno = 0;
+ long idVal = strtol(name.c_str() + slashIdx + 1, &endPtr, 10);
+ if (errno != 0) {
+ ADD_FAILURE() << "cannot parse provider id as an integer:" <<
+ name.c_str() << strerror(errno) << errno;
+ return false;
+ }
+ if (endPtr != name.c_str() + name.size()) {
+ ADD_FAILURE() << "provider id has unexpected length " << name.c_str();
+ return false;
+ }
+ if (idVal < 0) {
+ ADD_FAILURE() << "id is negative: " << name.c_str() << idVal;
+ return false;
+ }
+
+ *type = typeVal;
+ *id = static_cast<uint32_t>(idVal);
+
+ return true;
+ }
+
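To make the new naming scheme concrete, here is a hypothetical walk-through of the parsers above (illustrative only, not part of the test): provider instance names have the form "<type>/<id>" and device names "device@<version>/<type>/<id>".

    // Hypothetical usage sketch of the name parsers defined above.
    static void exampleNameParsing() {
        std::string type;
        uint32_t id;
        parseProviderName("legacy/0", &type, &id);   // type == "legacy", id == 0

        std::string version, cameraId;
        matchDeviceName("device@3.2/legacy/0", "legacy", &version, &cameraId);
        // version == "3.2", cameraId == "0"

        // Resolves to CAMERA_DEVICE_API_VERSION_3_2 for this device name.
        (void)getCameraDeviceVersion("device@3.2/legacy/0", "legacy");
    }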
Status mapToStatus(::android::status_t s) {
switch(s) {
case ::android::OK:
@@ -176,7 +241,7 @@
virtual void SetUp() override;
virtual void TearDown() override;
- sp<ICameraProvider> mProvider;
+ std::unordered_map<std::string, sp<ICameraProvider> > mProviders;
private:
CameraHidlEnvironment() {}
@@ -185,11 +250,40 @@
};
void CameraHidlEnvironment::SetUp() {
- // TODO: test the binderized mode
- mProvider = ::testing::VtsHalHidlTargetTestBase::getService<ICameraProvider>(kCameraPassthroughServiceName);
- // TODO: handle the device doesn't have any camera case
- ALOGI_IF(mProvider, "provider is not nullptr, %p", mProvider.get());
- ASSERT_NE(mProvider, nullptr);
+ sp<IServiceManager> manager = IServiceManager::getService();
+ ASSERT_NE(manager, nullptr);
+
+ manager->listByInterface(kProviderFQName,
+ [this](const hidl_vec<hidl_string> ®istered) {
+ std::string name;
+ uint32_t id;
+ sp<ICameraProvider> provider = nullptr;
+ for (size_t i = 0; i < registered.size(); i++) {
+ ASSERT_TRUE(parseProviderName(registered[i],
+ &name /*out*/, &id /*out*/));
+ provider = ICameraProvider::tryGetService(registered[i]);
+ ALOGI_IF(provider, "provider is not nullptr, %p", provider.get());
+ if (nullptr != provider.get()) {
+ mProviders.emplace(name, provider);
+ }
+ }
+ });
+
+ std::string legacyName;
+ uint32_t legacyId;
+ ASSERT_TRUE(parseProviderName(kCameraPassthroughServiceName,
+ &legacyName /*out*/, &legacyId /*out*/));
+ auto legacyIt = mProviders.find(legacyName);
+ //Add any legacy passthrough implementations
+ if (legacyIt == mProviders.end()) {
+ sp<ICameraProvider> provider = ICameraProvider::tryGetService(
+ kCameraPassthroughServiceName);
+ if (nullptr != provider.get()) {
+ mProviders.emplace(legacyName, provider);
+ }
+ }
+
+ ASSERT_FALSE(mProviders.empty());
}
void CameraHidlEnvironment::TearDown() {
@@ -253,9 +347,7 @@
size_t result = 1;
result = 31 * result + buf->numFds;
- result = 31 * result + buf->numInts;
- int length = buf->numFds + buf->numInts;
- for (int i = 0; i < length; i++) {
+ for (int i = 0; i < buf->numFds; i++) {
result = 31 * result + buf->data[i];
}
return result;
@@ -265,10 +357,8 @@
struct BufferComparator {
bool operator()(const buffer_handle_t& buf1,
const buffer_handle_t& buf2) const {
- if ((buf1->numFds == buf2->numFds) &&
- (buf1->numInts == buf2->numInts)) {
- int length = buf1->numFds + buf1->numInts;
- for (int i = 0; i < length; i++) {
+ if (buf1->numFds == buf2->numFds) {
+ for (int i = 0; i < buf1->numFds; i++) {
if (buf1->data[i] != buf2->data[i]) {
return false;
}
@@ -445,7 +535,7 @@
virtual void SetUp() override {}
virtual void TearDown() override {}
- hidl_vec<hidl_string> getCameraDeviceNames();
+ hidl_vec<hidl_string> getCameraDeviceNames(sp<ICameraProvider> provider);
struct EmptyDeviceCb : public ICameraDeviceCallback {
virtual Return<void> processCaptureResult(const hidl_vec<CaptureResult>& /*results*/) override {
@@ -521,7 +611,7 @@
CameraHidlTest *mParent; // Parent object
};
- void openCameraDevice(const std::string &name,const CameraHidlEnvironment* env,
+ void openCameraDevice(const std::string &name, sp<ICameraProvider> provider,
sp<::android::hardware::camera::device::V1_0::ICameraDevice> *device /*out*/);
void setupPreviewWindow(
const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device,
@@ -544,15 +634,17 @@
void waitForFrameLocked(DataCallbackMsg msgFrame,
std::unique_lock<std::mutex> &l);
void openEmptyDeviceSession(const std::string &name,
- const CameraHidlEnvironment* env,
+ sp<ICameraProvider> provider,
sp<ICameraDeviceSession> *session /*out*/,
camera_metadata_t **staticMeta /*out*/);
void configurePreviewStream(const std::string &name,
- const CameraHidlEnvironment* env,
+ sp<ICameraProvider> provider,
const AvailableStream *previewThreshold,
sp<ICameraDeviceSession> *session /*out*/,
Stream *previewStream /*out*/,
- HalStreamConfiguration *halStreamConfig /*out*/);
+ HalStreamConfiguration *halStreamConfig /*out*/,
+ bool *supportsPartialResults /*out*/,
+ uint32_t *partialResultCount /*out*/);
static Status getAvailableOutputStreams(camera_metadata_t *staticMeta,
std::vector<AvailableStream> &outputStreams,
const AvailableStream *threshold = nullptr);
@@ -569,11 +661,75 @@
::android::CameraParameters &cameraParams, const char *mode) ;
protected:
+
+ // In-flight queue for tracking completion of capture requests.
+ struct InFlightRequest {
+ // Set by notify() SHUTTER call.
+ nsecs_t shutterTimestamp;
+
+ bool errorCodeValid;
+ ErrorCode errorCode;
+
+ //Is partial result supported
+ bool usePartialResult;
+
+ //Partial result count expected
+ uint32_t numPartialResults;
+
+ // Message queue
+ std::shared_ptr<ResultMetadataQueue> resultQueue;
+
+ // Set by process_capture_result call with valid metadata
+ bool haveResultMetadata;
+
+ // Decremented by calls to process_capture_result with valid output
+ // and input buffers
+ ssize_t numBuffersLeft;
+
+ // A 64bit integer to index the frame number associated with this result.
+ int64_t frameNumber;
+
+ // The partial result count (index) for this capture result.
+ int32_t partialResultCount;
+
+ // For buffer drop errors, the stream ID for the stream that lost a buffer.
+ // Otherwise -1.
+ int32_t errorStreamId;
+
+ // If this request has any input buffer
+ bool hasInputBuffer;
+
+ // Result metadata
+ ::android::hardware::camera::common::V1_0::helper::CameraMetadata collectedResult;
+
+ // Buffers are added by process_capture_result when output buffers
+ // are returned from the HAL to the framework.
+ ::android::Vector<StreamBuffer> resultOutputBuffers;
+
+ InFlightRequest(ssize_t numBuffers, bool hasInput,
+ bool partialResults, uint32_t partialCount,
+ std::shared_ptr<ResultMetadataQueue> queue = nullptr) :
+ shutterTimestamp(0),
+ errorCodeValid(false),
+ errorCode(ErrorCode::ERROR_BUFFER),
+ usePartialResult(partialResults),
+ numPartialResults(partialCount),
+ resultQueue(queue),
+ haveResultMetadata(false),
+ numBuffersLeft(numBuffers),
+ frameNumber(0),
+ partialResultCount(0),
+ errorStreamId(-1),
+ hasInputBuffer(hasInput) {}
+ };
+
+ // Map from frame number to the in-flight request state
+ typedef ::android::KeyedVector<uint32_t, InFlightRequest*> InFlightMap;
+
std::mutex mLock; // Synchronize access to member variables
std::condition_variable mResultCondition; // Condition variable for incoming results
- uint32_t mResultFrameNumber; // Expected result frame number
- std::vector<StreamBuffer> mResultBuffers; // Holds stream buffers from capture result
- std::vector<ErrorMsg> mErrors; // Holds incoming error notifications
+ InFlightMap mInflightMap; // Map of all inflight requests
+
DataCallbackMsg mDataMessageTypeReceived; // Most recent message type received through data callbacks
uint32_t mVideoBufferIndex; // Buffer index of the most recent video buffer
uint32_t mVideoData; // Buffer data of the most recent video buffer
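The InFlightRequest bookkeeping above replaces the old single expected-frame-number and result-buffer members: the test registers one entry per submitted request, keyed by frame number, and the two callbacks fill it in until the shutter has arrived and all buffers are back. A rough sketch of the request-side flow; frameNumber, numOutputBuffers, supportsPartialResults, partialResultCount and resultQueue are assumed to come from the surrounding test code:

    // Hedged sketch: register the request before submitting it to the HAL.
    {
        std::unique_lock<std::mutex> l(mLock);
        mInflightMap.clear();
        mInflightMap.add(frameNumber,
                         new InFlightRequest(numOutputBuffers, false /*hasInput*/,
                                             supportsPartialResults,
                                             partialResultCount, resultQueue));
    }
    // ... call ICameraDeviceSession::processCaptureRequest(), then wait on
    // mResultCondition until the entry has shutterTimestamp != 0 and
    // numBuffersLeft == 0, and finally verify resultOutputBuffers.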
@@ -696,53 +852,180 @@
return Void();
}
+ bool notify = false;
std::unique_lock<std::mutex> l(mParent->mLock);
- const CaptureResult& result = results[0];
+ for (size_t i = 0 ; i < results.size(); i++) {
+ uint32_t frameNumber = results[i].frameNumber;
- if(mParent->mResultFrameNumber != result.frameNumber) {
- ALOGE("%s: Unexpected frame number! Expected: %u received: %u",
- __func__, mParent->mResultFrameNumber, result.frameNumber);
- ADD_FAILURE();
+ if ((results[i].result.size() == 0) &&
+ (results[i].outputBuffers.size() == 0) &&
+ (results[i].inputBuffer.buffer == nullptr) &&
+ (results[i].fmqResultSize == 0)) {
+ ALOGE("%s: No result data provided by HAL for frame %d result count: %d",
+ __func__, frameNumber, (int) results[i].fmqResultSize);
+ ADD_FAILURE();
+ break;
+ }
+
+ ssize_t idx = mParent->mInflightMap.indexOfKey(frameNumber);
+ if (::android::NAME_NOT_FOUND == idx) {
+ ALOGE("%s: Unexpected frame number! received: %u",
+ __func__, frameNumber);
+ ADD_FAILURE();
+ break;
+ }
+
+ bool isPartialResult = false;
+ bool hasInputBufferInRequest = false;
+ InFlightRequest *request = mParent->mInflightMap.editValueAt(idx);
+ ::android::hardware::camera::device::V3_2::CameraMetadata resultMetadata;
+ size_t resultSize = 0;
+ if (results[i].fmqResultSize > 0) {
+ resultMetadata.resize(results[i].fmqResultSize);
+ if (request->resultQueue == nullptr) {
+ ADD_FAILURE();
+ break;
+ }
+ if (!request->resultQueue->read(resultMetadata.data(),
+ results[i].fmqResultSize)) {
+ ALOGE("%s: Frame %d: Cannot read camera metadata from fmq,"
+ "size = %" PRIu64, __func__, frameNumber,
+ results[i].fmqResultSize);
+ ADD_FAILURE();
+ break;
+ }
+ resultSize = resultMetadata.size();
+ } else if (results[i].result.size() > 0) {
+ resultMetadata.setToExternal(const_cast<uint8_t *>(
+ results[i].result.data()), results[i].result.size());
+ resultSize = resultMetadata.size();
+ }
+
+ if (!request->usePartialResult && (resultSize > 0) &&
+ (results[i].partialResult != 1)) {
+ ALOGE("%s: Result is malformed for frame %d: partial_result %u "
+ "must be 1 if partial result is not supported", __func__,
+ frameNumber, results[i].partialResult);
+ ADD_FAILURE();
+ break;
+ }
+
+ if (results[i].partialResult != 0) {
+ request->partialResultCount = results[i].partialResult;
+ }
+
+ // Check if this result carries only partial metadata
+ if (request->usePartialResult && (resultSize > 0)) {
+ if ((results[i].partialResult > request->numPartialResults) ||
+ (results[i].partialResult < 1)) {
+ ALOGE("%s: Result is malformed for frame %d: partial_result %u"
+ " must be in the range of [1, %d] when metadata is "
+ "included in the result", __func__, frameNumber,
+ results[i].partialResult, request->numPartialResults);
+ ADD_FAILURE();
+ break;
+ }
+ request->collectedResult.append(
+ reinterpret_cast<const camera_metadata_t*>(
+ resultMetadata.data()));
+
+ isPartialResult =
+ (results[i].partialResult < request->numPartialResults);
+ }
+
+ hasInputBufferInRequest = request->hasInputBuffer;
+
+ // Did we get the (final) result metadata for this capture?
+ if ((resultSize > 0) && !isPartialResult) {
+ if (request->haveResultMetadata) {
+ ALOGE("%s: Called multiple times with metadata for frame %d",
+ __func__, frameNumber);
+ ADD_FAILURE();
+ break;
+ }
+ request->haveResultMetadata = true;
+ request->collectedResult.sort();
+ }
+
+ uint32_t numBuffersReturned = results[i].outputBuffers.size();
+ if (results[i].inputBuffer.buffer != nullptr) {
+ if (hasInputBufferInRequest) {
+ numBuffersReturned += 1;
+ } else {
+ ALOGW("%s: Input buffer should be NULL if there is no input"
+ " buffer sent in the request", __func__);
+ }
+ }
+ request->numBuffersLeft -= numBuffersReturned;
+ if (request->numBuffersLeft < 0) {
+ ALOGE("%s: Too many buffers returned for frame %d", __func__,
+ frameNumber);
+ ADD_FAILURE();
+ break;
+ }
+
+ request->resultOutputBuffers.appendArray(results[i].outputBuffers.data(),
+ results[i].outputBuffers.size());
+ // If shutter event is received notify the pending threads.
+ if (request->shutterTimestamp != 0) {
+ notify = true;
+ }
}
- size_t resultLength = result.outputBuffers.size();
- for (size_t i = 0; i < resultLength; i++) {
- mParent->mResultBuffers.push_back(result.outputBuffers[i]);
- }
-
- // TODO(epeev): Handle partial results in case client supports them and
- // verify the result against request settings.
-
l.unlock();
- mParent->mResultCondition.notify_one();
+ if (notify) {
+ mParent->mResultCondition.notify_one();
+ }
return Void();
}
Return<void> CameraHidlTest::DeviceCb::notify(
const hidl_vec<NotifyMsg>& messages) {
- const NotifyMsg& message = messages[0];
+ std::lock_guard<std::mutex> l(mParent->mLock);
- if (MsgType::ERROR == message.type) {
- {
- std::lock_guard<std::mutex> l(mParent->mLock);
- mParent->mErrors.push_back(message.msg.error);
+ for (size_t i = 0; i < messages.size(); i++) {
+ ssize_t idx = mParent->mInflightMap.indexOfKey(
+ messages[i].msg.shutter.frameNumber);
+ if (::android::NAME_NOT_FOUND == idx) {
+ ALOGE("%s: Unexpected frame number! received: %u",
+ __func__, messages[i].msg.shutter.frameNumber);
+ ADD_FAILURE();
+ break;
}
+ InFlightRequest *r = mParent->mInflightMap.editValueAt(idx);
- if ((ErrorCode::ERROR_REQUEST == message.msg.error.errorCode)
- || (ErrorCode::ERROR_BUFFER == message.msg.error.errorCode)) {
- mParent->mResultCondition.notify_one();
+ switch(messages[i].type) {
+ case MsgType::ERROR:
+ if (ErrorCode::ERROR_DEVICE == messages[i].msg.error.errorCode) {
+ ALOGE("%s: Camera reported serious device error",
+ __func__);
+ ADD_FAILURE();
+ } else {
+ r->errorCodeValid = true;
+ r->errorCode = messages[i].msg.error.errorCode;
+ r->errorStreamId = messages[i].msg.error.errorStreamId;
+ }
+ break;
+ case MsgType::SHUTTER:
+ r->shutterTimestamp = messages[i].msg.shutter.timestamp;
+ break;
+ default:
+ ALOGE("%s: Unsupported notify message %d", __func__,
+ messages[i].type);
+ ADD_FAILURE();
+ break;
}
}
+ mParent->mResultCondition.notify_one();
return Void();
}
-hidl_vec<hidl_string> CameraHidlTest::getCameraDeviceNames() {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
+hidl_vec<hidl_string> CameraHidlTest::getCameraDeviceNames(sp<ICameraProvider> provider) {
hidl_vec<hidl_string> cameraDeviceNames;
Return<void> ret;
- ret = env->mProvider->getCameraIdList(
+ ret = provider->getCameraIdList(
[&](auto status, const auto& idList) {
ALOGI("getCameraIdList returns status:%d", (int)status);
for (size_t i = 0; i < idList.size(); i++) {
@@ -760,58 +1043,63 @@
// Test if ICameraProvider::isTorchModeSupported returns Status::OK
TEST_F(CameraHidlTest, isTorchModeSupported) {
Return<void> ret;
- ret = CameraHidlEnvironment::Instance()->mProvider->isSetTorchModeSupported(
- [&](auto status, bool support) {
- ALOGI("isSetTorchModeSupported returns status:%d supported:%d",
- (int)status, support);
- ASSERT_EQ(Status::OK, status);
- });
- ASSERT_TRUE(ret.isOk());
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ ret = provider.second->isSetTorchModeSupported(
+ [&](auto status, bool support) {
+ ALOGI("isSetTorchModeSupported returns status:%d supported:%d",
+ (int)status, support);
+ ASSERT_EQ(Status::OK, status);
+ });
+ ASSERT_TRUE(ret.isOk());
+ }
}
// TODO: consider removing this test if getCameraDeviceNames() has the same coverage
TEST_F(CameraHidlTest, getCameraIdList) {
Return<void> ret;
- ret = CameraHidlEnvironment::Instance()->mProvider->getCameraIdList(
- [&](auto status, const auto& idList) {
- ALOGI("getCameraIdList returns status:%d", (int)status);
- for (size_t i = 0; i < idList.size(); i++) {
- ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
- }
- ASSERT_EQ(Status::OK, status);
- // This is true for internal camera provider.
- // Not necessary hold for external cameras providers
- ASSERT_GT(idList.size(), 0u);
- });
- ASSERT_TRUE(ret.isOk());
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ ret = provider.second->getCameraIdList(
+ [&](auto status, const auto& idList) {
+ ALOGI("getCameraIdList returns status:%d", (int)status);
+ for (size_t i = 0; i < idList.size(); i++) {
+ ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
+ }
+ ASSERT_EQ(Status::OK, status);
+ // This is true for internal camera provider.
+ // Not necessarily true for external camera providers.
+ ASSERT_GT(idList.size(), 0u);
+ });
+ ASSERT_TRUE(ret.isOk());
+ }
}
// Test if ICameraProvider::getVendorTags returns Status::OK
TEST_F(CameraHidlTest, getVendorTags) {
Return<void> ret;
- ret = CameraHidlEnvironment::Instance()->mProvider->getVendorTags(
- [&](auto status, const auto& vendorTagSecs) {
- ALOGI("getVendorTags returns status:%d numSections %zu",
- (int)status, vendorTagSecs.size());
- for (size_t i = 0; i < vendorTagSecs.size(); i++) {
- ALOGI("Vendor tag section %zu name %s",
- i, vendorTagSecs[i].sectionName.c_str());
- for (size_t j = 0; j < vendorTagSecs[i].tags.size(); j++) {
- const auto& tag = vendorTagSecs[i].tags[j];
- ALOGI("Vendor tag id %u name %s type %d",
- tag.tagId,
- tag.tagName.c_str(),
- (int) tag.tagType);
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ ret = provider.second->getVendorTags(
+ [&](auto status, const auto& vendorTagSecs) {
+ ALOGI("getVendorTags returns status:%d numSections %zu",
+ (int)status, vendorTagSecs.size());
+ for (size_t i = 0; i < vendorTagSecs.size(); i++) {
+ ALOGI("Vendor tag section %zu name %s",
+ i, vendorTagSecs[i].sectionName.c_str());
+ for (size_t j = 0; j < vendorTagSecs[i].tags.size(); j++) {
+ const auto& tag = vendorTagSecs[i].tags[j];
+ ALOGI("Vendor tag id %u name %s type %d",
+ tag.tagId,
+ tag.tagName.c_str(),
+ (int) tag.tagType);
+ }
}
- }
- ASSERT_EQ(Status::OK, status);
- });
- ASSERT_TRUE(ret.isOk());
+ ASSERT_EQ(Status::OK, status);
+ });
+ ASSERT_TRUE(ret.isOk());
+ }
}
// Test if ICameraProvider::setCallback returns Status::OK
TEST_F(CameraHidlTest, setCallback) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
struct ProviderCb : public ICameraProviderCallback {
virtual Return<void> cameraDeviceStatusChange(
const hidl_string& cameraDeviceName,
@@ -830,37 +1118,49 @@
}
};
sp<ProviderCb> cb = new ProviderCb;
- auto status = env->mProvider->setCallback(cb);
- ASSERT_TRUE(status.isOk());
- ASSERT_EQ(Status::OK, status);
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ auto status = provider.second->setCallback(cb);
+ ASSERT_TRUE(status.isOk());
+ ASSERT_EQ(Status::OK, status);
+ // Reset callback since cb will go out of scope
+ status = provider.second->setCallback(nullptr);
+ ASSERT_TRUE(status.isOk());
+ ASSERT_EQ(Status::OK, status);
+ }
}
// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device
TEST_F(CameraHidlTest, getCameraDeviceInterface) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- Return<void> ret;
- ret = env->mProvider->getCameraDeviceInterface_V3_x(
- name,
- [&](auto status, const auto& device3_2) {
- ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device3_2, nullptr);
- });
- ASSERT_TRUE(ret.isOk());
- } else if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- Return<void> ret;
- ret = env->mProvider->getCameraDeviceInterface_V1_x(
- name,
- [&](auto status, const auto& device1) {
- ALOGI("getCameraDeviceInterface_V1_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device1, nullptr);
- });
- ASSERT_TRUE(ret.isOk());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ Return<void> ret;
+ ret = provider.second->getCameraDeviceInterface_V3_x(
+ name,
+ [&](auto status, const auto& device3_2) {
+ ALOGI("getCameraDeviceInterface_V3_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device3_2, nullptr);
+ });
+ ASSERT_TRUE(ret.isOk());
+ } else if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ Return<void> ret;
+ ret = provider.second->getCameraDeviceInterface_V1_x(
+ name,
+ [&](auto status, const auto& device1) {
+ ALOGI("getCameraDeviceInterface_V1_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device1, nullptr);
+ });
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
@@ -868,60 +1168,66 @@
// Verify that the device resource cost can be retrieved and the values are
// sane.
TEST_F(CameraHidlTest, getResourceCost) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_2;
- ALOGI("getResourceCost: Testing camera device %s", name.c_str());
- Return<void> ret;
- ret = env->mProvider->getCameraDeviceInterface_V3_x(
- name,
- [&](auto status, const auto& device) {
- ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device, nullptr);
- device3_2 = device;
- });
- ASSERT_TRUE(ret.isOk());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_2;
+ ALOGI("getResourceCost: Testing camera device %s", name.c_str());
+ Return<void> ret;
+ ret = provider.second->getCameraDeviceInterface_V3_x(
+ name,
+ [&](auto status, const auto& device) {
+ ALOGI("getCameraDeviceInterface_V3_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device, nullptr);
+ device3_2 = device;
+ });
+ ASSERT_TRUE(ret.isOk());
- ret = device3_2->getResourceCost(
- [&](auto status, const auto& resourceCost) {
- ALOGI("getResourceCost returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ALOGI(" Resource cost is %d", resourceCost.resourceCost);
- ASSERT_LE(resourceCost.resourceCost, 100u);
- for (const auto& name : resourceCost.conflictingDevices) {
- ALOGI(" Conflicting device: %s", name.c_str());
- }
- });
- ASSERT_TRUE(ret.isOk());
- } else {
- ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- ALOGI("getResourceCost: Testing camera device %s", name.c_str());
- Return<void> ret;
- ret = env->mProvider->getCameraDeviceInterface_V1_x(
- name,
- [&](auto status, const auto& device) {
- ALOGI("getCameraDeviceInterface_V1_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device, nullptr);
- device1 = device;
- });
- ASSERT_TRUE(ret.isOk());
+ ret = device3_2->getResourceCost(
+ [&](auto status, const auto& resourceCost) {
+ ALOGI("getResourceCost returns status:%d", (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ALOGI(" Resource cost is %d", resourceCost.resourceCost);
+ ASSERT_LE(resourceCost.resourceCost, 100u);
+ for (const auto& name : resourceCost.conflictingDevices) {
+ ALOGI(" Conflicting device: %s", name.c_str());
+ }
+ });
+ ASSERT_TRUE(ret.isOk());
+ } else {
+ ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ ALOGI("getResourceCost: Testing camera device %s", name.c_str());
+ Return<void> ret;
+ ret = provider.second->getCameraDeviceInterface_V1_x(
+ name,
+ [&](auto status, const auto& device) {
+ ALOGI("getCameraDeviceInterface_V1_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device, nullptr);
+ device1 = device;
+ });
+ ASSERT_TRUE(ret.isOk());
- ret = device1->getResourceCost(
- [&](auto status, const auto& resourceCost) {
- ALOGI("getResourceCost returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ALOGI(" Resource cost is %d", resourceCost.resourceCost);
- ASSERT_LE(resourceCost.resourceCost, 100u);
- for (const auto& name : resourceCost.conflictingDevices) {
- ALOGI(" Conflicting device: %s", name.c_str());
- }
- });
- ASSERT_TRUE(ret.isOk());
+ ret = device1->getResourceCost(
+ [&](auto status, const auto& resourceCost) {
+ ALOGI("getResourceCost returns status:%d", (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ALOGI(" Resource cost is %d",
+ resourceCost.resourceCost);
+ ASSERT_LE(resourceCost.resourceCost, 100u);
+ for (const auto& name : resourceCost.conflictingDevices) {
+ ALOGI(" Conflicting device: %s", name.c_str());
+ }
+ });
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
@@ -929,126 +1235,143 @@
// Verify that the static camera info can be retrieved
// successfully.
TEST_F(CameraHidlTest, getCameraInfo) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
- Return<void> ret;
- ret = env->mProvider->getCameraDeviceInterface_V1_x(
- name,
- [&](auto status, const auto& device) {
- ALOGI("getCameraDeviceInterface_V1_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device, nullptr);
- device1 = device;
- });
- ASSERT_TRUE(ret.isOk());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ ALOGI("getCameraCharacteristics: Testing camera device %s",
+ name.c_str());
+ Return<void> ret;
+ ret = provider.second->getCameraDeviceInterface_V1_x(
+ name,
+ [&](auto status, const auto& device) {
+ ALOGI("getCameraDeviceInterface_V1_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device, nullptr);
+ device1 = device;
+ });
+ ASSERT_TRUE(ret.isOk());
- ret = device1->getCameraInfo(
- [&](auto status, const auto& info) {
- ALOGI("getCameraInfo returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- switch(info.orientation) {
- case 0:
- case 90:
- case 180:
- case 270:
- //Expected cases
- ALOGI("camera orientation: %d", info.orientation);
- break;
- default:
- FAIL() << "Unexpected camera orientation:" << info.orientation;
- }
- switch(info.facing) {
- case CameraFacing::BACK:
- case CameraFacing::FRONT:
- case CameraFacing::EXTERNAL:
- //Expected cases
- ALOGI("camera facing: %d", info.facing);
- break;
- default:
- FAIL() << "Unexpected camera facing:" << static_cast<uint32_t> (info.facing);
- }
- });
- ASSERT_TRUE(ret.isOk());
+ ret = device1->getCameraInfo(
+ [&](auto status, const auto& info) {
+ ALOGI("getCameraInfo returns status:%d", (int)status);
+ ASSERT_EQ(Status::OK, status);
+ switch(info.orientation) {
+ case 0:
+ case 90:
+ case 180:
+ case 270:
+ //Expected cases
+ ALOGI("camera orientation: %d", info.orientation);
+ break;
+ default:
+ FAIL() << "Unexpected camera orientation:" << info.orientation;
+ }
+ switch(info.facing) {
+ case CameraFacing::BACK:
+ case CameraFacing::FRONT:
+ case CameraFacing::EXTERNAL:
+ //Expected cases
+ ALOGI("camera facing: %d", info.facing);
+ break;
+ default:
+ FAIL() << "Unexpected camera facing:" << static_cast<uint32_t> (
+ info.facing);
+ }
+ });
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
// Check whether preview window can be configured
TEST_F(CameraHidlTest, setPreviewWindow) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
- sp<BufferItemConsumer> bufferItemConsumer;
- sp<BufferItemHander> bufferHandler;
- setupPreviewWindow(device1,
- &bufferItemConsumer /*out*/, &bufferHandler /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
+ sp<BufferItemConsumer> bufferItemConsumer;
+ sp<BufferItemHander> bufferHandler;
+ setupPreviewWindow(device1,
+ &bufferItemConsumer /*out*/, &bufferHandler /*out*/);
- Return<void> ret;
- ret = device1->close();
- ASSERT_TRUE(ret.isOk());
+ Return<void> ret;
+ ret = device1->close();
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
// Verify that setting preview window fails in case device is not open
TEST_F(CameraHidlTest, setPreviewWindowInvalid) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
- Return<void> ret;
- ret = env->mProvider->getCameraDeviceInterface_V1_x(
- name,
- [&](auto status, const auto& device) {
- ALOGI("getCameraDeviceInterface_V1_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device, nullptr);
- device1 = device;
- });
- ASSERT_TRUE(ret.isOk());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+            ALOGI("setPreviewWindowInvalid: Testing camera device %s",
+ name.c_str());
+ Return<void> ret;
+ ret = provider.second->getCameraDeviceInterface_V1_x(
+ name,
+ [&](auto status, const auto& device) {
+ ALOGI("getCameraDeviceInterface_V1_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device, nullptr);
+ device1 = device;
+ });
+ ASSERT_TRUE(ret.isOk());
- Return<Status> returnStatus = device1->setPreviewWindow(nullptr);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OPERATION_NOT_SUPPORTED, returnStatus);
+ Return<Status> returnStatus = device1->setPreviewWindow(nullptr);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OPERATION_NOT_SUPPORTED, returnStatus);
+ }
}
}
}
// Start and stop preview, checking whether it gets enabled in between.
TEST_F(CameraHidlTest, startStopPreview) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
- sp<BufferItemConsumer> bufferItemConsumer;
- sp<BufferItemHander> bufferHandler;
- setupPreviewWindow(device1,
- &bufferItemConsumer /*out*/, &bufferHandler /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
+ sp<BufferItemConsumer> bufferItemConsumer;
+ sp<BufferItemHander> bufferHandler;
+ setupPreviewWindow(device1,
+ &bufferItemConsumer /*out*/, &bufferHandler /*out*/);
- startPreview(device1);
+ startPreview(device1);
- Return<bool> returnBoolStatus = device1->previewEnabled();
- ASSERT_TRUE(returnBoolStatus.isOk());
- ASSERT_TRUE(returnBoolStatus);
+ Return<bool> returnBoolStatus = device1->previewEnabled();
+ ASSERT_TRUE(returnBoolStatus.isOk());
+ ASSERT_TRUE(returnBoolStatus);
- stopPreviewAndClose(device1);
+ stopPreviewAndClose(device1);
+ }
}
}
}
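startPreview() and stopPreviewAndClose() are used by most of the 1.0 tests above and below; sketched from their call sites, they are assumed to be thin wrappers over the corresponding ICameraDevice calls:

    void CameraHidlTest::startPreview(
            const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device) {
        Return<Status> returnStatus = device->startPreview();
        ASSERT_TRUE(returnStatus.isOk());
        ASSERT_EQ(Status::OK, returnStatus);
    }

    void CameraHidlTest::stopPreviewAndClose(
            const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device) {
        Return<void> ret = device->stopPreview();
        ASSERT_TRUE(ret.isOk());

        ret = device->close();
        ASSERT_TRUE(ret.isOk());
    }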
@@ -1056,599 +1379,646 @@
// Start preview without active preview window. Preview should start as soon
// as a valid active window gets configured.
TEST_F(CameraHidlTest, startStopPreviewDelayed) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
- Return<Status> returnStatus = device1->setPreviewWindow(nullptr);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
+ Return<Status> returnStatus = device1->setPreviewWindow(nullptr);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
- startPreview(device1);
+ startPreview(device1);
- sp<BufferItemConsumer> bufferItemConsumer;
- sp<BufferItemHander> bufferHandler;
- setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
- &bufferHandler /*out*/);
+ sp<BufferItemConsumer> bufferItemConsumer;
+ sp<BufferItemHander> bufferHandler;
+ setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
+ &bufferHandler /*out*/);
- //Preview should get enabled now
- Return<bool> returnBoolStatus = device1->previewEnabled();
- ASSERT_TRUE(returnBoolStatus.isOk());
- ASSERT_TRUE(returnBoolStatus);
+ //Preview should get enabled now
+ Return<bool> returnBoolStatus = device1->previewEnabled();
+ ASSERT_TRUE(returnBoolStatus.isOk());
+ ASSERT_TRUE(returnBoolStatus);
- stopPreviewAndClose(device1);
+ stopPreviewAndClose(device1);
+ }
}
}
}
// Verify that image capture behaves as expected along with preview callbacks.
TEST_F(CameraHidlTest, takePicture) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
- sp<BufferItemConsumer> bufferItemConsumer;
- sp<BufferItemHander> bufferHandler;
- setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
- &bufferHandler /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
+ sp<BufferItemConsumer> bufferItemConsumer;
+ sp<BufferItemHander> bufferHandler;
+ setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
+ &bufferHandler /*out*/);
- {
- std::unique_lock<std::mutex> l(mLock);
- mDataMessageTypeReceived = DataCallbackMsg::RAW_IMAGE_NOTIFY;
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ mDataMessageTypeReceived = DataCallbackMsg::RAW_IMAGE_NOTIFY;
+ }
+
+ enableMsgType((unsigned int)DataCallbackMsg::PREVIEW_FRAME,
+ device1);
+ startPreview(device1);
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ waitForFrameLocked(DataCallbackMsg::PREVIEW_FRAME, l);
+ }
+
+ disableMsgType((unsigned int)DataCallbackMsg::PREVIEW_FRAME,
+ device1);
+ enableMsgType((unsigned int)DataCallbackMsg::COMPRESSED_IMAGE,
+ device1);
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ mDataMessageTypeReceived = DataCallbackMsg::RAW_IMAGE_NOTIFY;
+ }
+
+ Return<Status> returnStatus = device1->takePicture();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ waitForFrameLocked(DataCallbackMsg::COMPRESSED_IMAGE, l);
+ }
+
+ disableMsgType((unsigned int)DataCallbackMsg::COMPRESSED_IMAGE,
+ device1);
+ stopPreviewAndClose(device1);
}
-
- enableMsgType((unsigned int)DataCallbackMsg::PREVIEW_FRAME, device1);
- startPreview(device1);
-
- {
- std::unique_lock<std::mutex> l(mLock);
- waitForFrameLocked(DataCallbackMsg::PREVIEW_FRAME, l);
- }
-
- disableMsgType((unsigned int)DataCallbackMsg::PREVIEW_FRAME,
- device1);
- enableMsgType((unsigned int)DataCallbackMsg::COMPRESSED_IMAGE,
- device1);
-
- {
- std::unique_lock<std::mutex> l(mLock);
- mDataMessageTypeReceived = DataCallbackMsg::RAW_IMAGE_NOTIFY;
- }
-
- Return<Status> returnStatus = device1->takePicture();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
-
- {
- std::unique_lock<std::mutex> l(mLock);
- waitForFrameLocked(DataCallbackMsg::COMPRESSED_IMAGE, l);
- }
-
- disableMsgType((unsigned int)DataCallbackMsg::COMPRESSED_IMAGE,
- device1);
- stopPreviewAndClose(device1);
}
}
}
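The takePicture flow relies on waitForFrameLocked() to block until the data callback reports the requested message type. A minimal sketch, assuming a timeout constant analogous to kAutoFocusTimeoutSec used further down; mLock is held through the caller's unique_lock:

    void CameraHidlTest::waitForFrameLocked(DataCallbackMsg msgFrame,
            std::unique_lock<std::mutex> &l) {
        // The data callback updates mDataMessageTypeReceived and signals
        // mResultCondition whenever a new frame message arrives.
        while (msgFrame != mDataMessageTypeReceived) {
            auto timeout = std::chrono::system_clock::now() +
                    std::chrono::seconds(kStreamBufferTimeoutSec); // assumed constant
            ASSERT_NE(std::cv_status::timeout,
                    mResultCondition.wait_until(l, timeout));
        }
    }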
// Image capture should fail in case preview didn't get enabled first.
TEST_F(CameraHidlTest, takePictureFail) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
- Return<Status> returnStatus = device1->takePicture();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_NE(Status::OK, returnStatus);
+ Return<Status> returnStatus = device1->takePicture();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_NE(Status::OK, returnStatus);
- Return<void> ret = device1->close();
- ASSERT_TRUE(ret.isOk());
+ Return<void> ret = device1->close();
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
// Verify that image capture can be cancelled.
TEST_F(CameraHidlTest, cancelPicture) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
- sp<BufferItemConsumer> bufferItemConsumer;
- sp<BufferItemHander> bufferHandler;
- setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
- &bufferHandler /*out*/);
- startPreview(device1);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
+ sp<BufferItemConsumer> bufferItemConsumer;
+ sp<BufferItemHander> bufferHandler;
+ setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
+ &bufferHandler /*out*/);
+ startPreview(device1);
- Return<Status> returnStatus = device1->takePicture();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
+ Return<Status> returnStatus = device1->takePicture();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
- returnStatus = device1->cancelPicture();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
+ returnStatus = device1->cancelPicture();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
- stopPreviewAndClose(device1);
+ stopPreviewAndClose(device1);
+ }
}
}
}
-// Image capture cancel should fail when image capture is not running.
-TEST_F(CameraHidlTest, cancelPictureFail) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+// Image capture cancel is a no-op when image capture is not running.
+TEST_F(CameraHidlTest, cancelPictureNOP) {
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
- sp<BufferItemConsumer> bufferItemConsumer;
- sp<BufferItemHander> bufferHandler;
- setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
- &bufferHandler /*out*/);
- startPreview(device1);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
+ sp<BufferItemConsumer> bufferItemConsumer;
+ sp<BufferItemHander> bufferHandler;
+ setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
+ &bufferHandler /*out*/);
+ startPreview(device1);
- Return<Status> returnStatus = device1->cancelPicture();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_NE(Status::OK, returnStatus);
+ Return<Status> returnStatus = device1->cancelPicture();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
- stopPreviewAndClose(device1);
+ stopPreviewAndClose(device1);
+ }
}
}
}
// Test basic video recording.
TEST_F(CameraHidlTest, startStopRecording) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
- sp<BufferItemConsumer> bufferItemConsumer;
- sp<BufferItemHander> bufferHandler;
- setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
- &bufferHandler /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
+ sp<BufferItemConsumer> bufferItemConsumer;
+ sp<BufferItemHander> bufferHandler;
+ setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
+ &bufferHandler /*out*/);
- {
- std::unique_lock<std::mutex> l(mLock);
- mDataMessageTypeReceived = DataCallbackMsg::RAW_IMAGE_NOTIFY;
- }
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ mDataMessageTypeReceived = DataCallbackMsg::RAW_IMAGE_NOTIFY;
+ }
- enableMsgType((unsigned int)DataCallbackMsg::PREVIEW_FRAME, device1);
- startPreview(device1);
-
- {
- std::unique_lock<std::mutex> l(mLock);
- waitForFrameLocked(DataCallbackMsg::PREVIEW_FRAME, l);
- mDataMessageTypeReceived = DataCallbackMsg::RAW_IMAGE_NOTIFY;
- mVideoBufferIndex = UINT32_MAX;
- }
-
- disableMsgType((unsigned int)DataCallbackMsg::PREVIEW_FRAME, device1);
-
- bool videoMetaEnabled = false;
- Return<Status> returnStatus = device1->storeMetaDataInBuffers(true);
- ASSERT_TRUE(returnStatus.isOk());
- // It is allowed for devices to not support this feature
- ASSERT_TRUE((Status::OK == returnStatus) ||
- (Status::OPERATION_NOT_SUPPORTED == returnStatus));
- if (Status::OK == returnStatus) {
- videoMetaEnabled = true;
- }
-
- enableMsgType((unsigned int)DataCallbackMsg::VIDEO_FRAME, device1);
- Return<bool> returnBoolStatus = device1->recordingEnabled();
- ASSERT_TRUE(returnBoolStatus.isOk());
- ASSERT_FALSE(returnBoolStatus);
-
- returnStatus = device1->startRecording();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
-
- {
- std::unique_lock<std::mutex> l(mLock);
- waitForFrameLocked(DataCallbackMsg::VIDEO_FRAME, l);
- ASSERT_NE(UINT32_MAX, mVideoBufferIndex);
- disableMsgType((unsigned int)DataCallbackMsg::VIDEO_FRAME,
+ enableMsgType((unsigned int)DataCallbackMsg::PREVIEW_FRAME,
device1);
- }
+ startPreview(device1);
- returnBoolStatus = device1->recordingEnabled();
- ASSERT_TRUE(returnBoolStatus.isOk());
- ASSERT_TRUE(returnBoolStatus);
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ waitForFrameLocked(DataCallbackMsg::PREVIEW_FRAME, l);
+ mDataMessageTypeReceived = DataCallbackMsg::RAW_IMAGE_NOTIFY;
+ mVideoBufferIndex = UINT32_MAX;
+ }
- Return<void> ret;
- if (videoMetaEnabled) {
- ret = device1->releaseRecordingFrameHandle(mVideoData,
- mVideoBufferIndex, mVideoNativeHandle);
+ disableMsgType((unsigned int)DataCallbackMsg::PREVIEW_FRAME,
+ device1);
+
+ bool videoMetaEnabled = false;
+ Return<Status> returnStatus = device1->storeMetaDataInBuffers(
+ true);
+ ASSERT_TRUE(returnStatus.isOk());
+ // It is allowed for devices to not support this feature
+ ASSERT_TRUE((Status::OK == returnStatus) ||
+ (Status::OPERATION_NOT_SUPPORTED == returnStatus));
+ if (Status::OK == returnStatus) {
+ videoMetaEnabled = true;
+ }
+
+ enableMsgType((unsigned int)DataCallbackMsg::VIDEO_FRAME,
+ device1);
+ Return<bool> returnBoolStatus = device1->recordingEnabled();
+ ASSERT_TRUE(returnBoolStatus.isOk());
+ ASSERT_FALSE(returnBoolStatus);
+
+ returnStatus = device1->startRecording();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ waitForFrameLocked(DataCallbackMsg::VIDEO_FRAME, l);
+ ASSERT_NE(UINT32_MAX, mVideoBufferIndex);
+ disableMsgType((unsigned int)DataCallbackMsg::VIDEO_FRAME,
+ device1);
+ }
+
+ returnBoolStatus = device1->recordingEnabled();
+ ASSERT_TRUE(returnBoolStatus.isOk());
+ ASSERT_TRUE(returnBoolStatus);
+
+ Return<void> ret;
+ if (videoMetaEnabled) {
+ ret = device1->releaseRecordingFrameHandle(mVideoData,
+ mVideoBufferIndex, mVideoNativeHandle);
+ ASSERT_TRUE(ret.isOk());
+ } else {
+ ret = device1->releaseRecordingFrame(mVideoData,
+ mVideoBufferIndex);
+ ASSERT_TRUE(ret.isOk());
+ }
+
+ ret = device1->stopRecording();
ASSERT_TRUE(ret.isOk());
- } else {
- ret = device1->releaseRecordingFrame(mVideoData, mVideoBufferIndex);
- ASSERT_TRUE(ret.isOk());
+
+ stopPreviewAndClose(device1);
}
-
- ret = device1->stopRecording();
- ASSERT_TRUE(ret.isOk());
-
- stopPreviewAndClose(device1);
}
}
}
// It shouldn't be possible to start recording without enabling preview first.
TEST_F(CameraHidlTest, startRecordingFail) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
- Return<bool> returnBoolStatus = device1->recordingEnabled();
- ASSERT_TRUE(returnBoolStatus.isOk());
- ASSERT_FALSE(returnBoolStatus);
+ Return<bool> returnBoolStatus = device1->recordingEnabled();
+ ASSERT_TRUE(returnBoolStatus.isOk());
+ ASSERT_FALSE(returnBoolStatus);
- Return<Status> returnStatus = device1->startRecording();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_NE(Status::OK, returnStatus);
+ Return<Status> returnStatus = device1->startRecording();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_NE(Status::OK, returnStatus);
- Return<void> ret = device1->close();
- ASSERT_TRUE(ret.isOk());
+ Return<void> ret = device1->close();
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
// Check autofocus support if available.
TEST_F(CameraHidlTest, autoFocus) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- std::vector<const char *> focusModes = {CameraParameters::FOCUS_MODE_AUTO,
- CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE,
- CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO};
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ std::vector<const char *> focusModes = {CameraParameters::FOCUS_MODE_AUTO,
+ CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE,
+ CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO};
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
- ::android::CameraParameters cameraParams;
- getParameters(device1, &cameraParams /*out*/);
+ ::android::CameraParameters cameraParams;
+ getParameters(device1, &cameraParams /*out*/);
- if (Status::OK != isAutoFocusModeAvailable(cameraParams,
- CameraParameters::FOCUS_MODE_AUTO)) {
- Return<void> ret = device1->close();
- ASSERT_TRUE(ret.isOk());
- continue;
- }
-
- sp<BufferItemConsumer> bufferItemConsumer;
- sp<BufferItemHander> bufferHandler;
- setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
- &bufferHandler /*out*/);
- startPreview(device1);
- enableMsgType((unsigned int)NotifyCallbackMsg::FOCUS, device1);
-
- for (auto &iter : focusModes) {
if (Status::OK != isAutoFocusModeAvailable(cameraParams,
- iter)) {
+ CameraParameters::FOCUS_MODE_AUTO)) {
+ Return<void> ret = device1->close();
+ ASSERT_TRUE(ret.isOk());
continue;
}
- cameraParams.set(CameraParameters::KEY_FOCUS_MODE, iter);
- setParameters(device1, cameraParams);
- {
- std::unique_lock<std::mutex> l(mLock);
- mNotifyMessage = NotifyCallbackMsg::ERROR;
- }
+ sp<BufferItemConsumer> bufferItemConsumer;
+ sp<BufferItemHander> bufferHandler;
+ setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
+ &bufferHandler /*out*/);
+ startPreview(device1);
+ enableMsgType((unsigned int)NotifyCallbackMsg::FOCUS, device1);
- Return<Status> returnStatus = device1->autoFocus();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
+ for (auto &iter : focusModes) {
+ if (Status::OK != isAutoFocusModeAvailable(cameraParams,
+ iter)) {
+ continue;
+ }
- {
- std::unique_lock<std::mutex> l(mLock);
- while (NotifyCallbackMsg::FOCUS != mNotifyMessage) {
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::seconds(kAutoFocusTimeoutSec);
- ASSERT_NE(std::cv_status::timeout,
- mResultCondition.wait_until(l, timeout));
+ cameraParams.set(CameraParameters::KEY_FOCUS_MODE, iter);
+ setParameters(device1, cameraParams);
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ mNotifyMessage = NotifyCallbackMsg::ERROR;
+ }
+
+ Return<Status> returnStatus = device1->autoFocus();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ while (NotifyCallbackMsg::FOCUS != mNotifyMessage) {
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::seconds(kAutoFocusTimeoutSec);
+ ASSERT_NE(std::cv_status::timeout,
+ mResultCondition.wait_until(l, timeout));
+ }
}
}
- }
- disableMsgType((unsigned int)NotifyCallbackMsg::FOCUS, device1);
- stopPreviewAndClose(device1);
+ disableMsgType((unsigned int)NotifyCallbackMsg::FOCUS, device1);
+ stopPreviewAndClose(device1);
+ }
}
}
}
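isAutoFocusModeAvailable() gates the autofocus tests; sketched from its call sites, it is assumed to only check whether the requested mode appears in the supported focus mode list reported by the HAL:

    Status CameraHidlTest::isAutoFocusModeAvailable(
            ::android::CameraParameters &cameraParams, const char *mode) {
        const char *focusModes = cameraParams.get(
                CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
        if ((nullptr != focusModes) &&
                ::android::String8(focusModes).contains(mode)) {
            return Status::OK;
        }
        return Status::METHOD_NOT_SUPPORTED;
    }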
// In case autofocus is supported verify that it can be cancelled.
TEST_F(CameraHidlTest, cancelAutoFocus) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
- ::android::CameraParameters cameraParams;
- getParameters(device1, &cameraParams /*out*/);
+ ::android::CameraParameters cameraParams;
+ getParameters(device1, &cameraParams /*out*/);
- if (Status::OK != isAutoFocusModeAvailable(cameraParams,
- CameraParameters::FOCUS_MODE_AUTO)) {
- Return<void> ret = device1->close();
- ASSERT_TRUE(ret.isOk());
- continue;
+ if (Status::OK != isAutoFocusModeAvailable(cameraParams,
+ CameraParameters::FOCUS_MODE_AUTO)) {
+ Return<void> ret = device1->close();
+ ASSERT_TRUE(ret.isOk());
+ continue;
+ }
+
+ // It should be fine to call before preview starts.
+ ASSERT_EQ(Status::OK, device1->cancelAutoFocus());
+
+ sp<BufferItemConsumer> bufferItemConsumer;
+ sp<BufferItemHander> bufferHandler;
+ setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
+ &bufferHandler /*out*/);
+ startPreview(device1);
+
+ // It should be fine to call after preview starts too.
+ Return<Status> returnStatus = device1->cancelAutoFocus();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+
+ returnStatus = device1->autoFocus();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+
+ returnStatus = device1->cancelAutoFocus();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+
+ stopPreviewAndClose(device1);
}
-
- // It should be fine to call before preview starts.
- ASSERT_EQ(Status::OK, device1->cancelAutoFocus());
-
- sp<BufferItemConsumer> bufferItemConsumer;
- sp<BufferItemHander> bufferHandler;
- setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
- &bufferHandler /*out*/);
- startPreview(device1);
-
- // It should be fine to call after preview starts too.
- Return<Status> returnStatus = device1->cancelAutoFocus();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
-
- returnStatus = device1->autoFocus();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
-
- returnStatus = device1->cancelAutoFocus();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
-
- stopPreviewAndClose(device1);
}
}
}
// Check whether face detection is available and try to enable & disable it.
TEST_F(CameraHidlTest, sendCommandFaceDetection) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
- ::android::CameraParameters cameraParams;
- getParameters(device1, &cameraParams /*out*/);
+ ::android::CameraParameters cameraParams;
+ getParameters(device1, &cameraParams /*out*/);
- int32_t hwFaces = cameraParams.getInt(
- CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW);
- int32_t swFaces = cameraParams.getInt(
- CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW);
- if ((0 >= hwFaces) && (0 >= swFaces)) {
- Return<void> ret = device1->close();
- ASSERT_TRUE(ret.isOk());
- continue;
+ int32_t hwFaces = cameraParams.getInt(
+ CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW);
+ int32_t swFaces = cameraParams.getInt(
+ CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW);
+ if ((0 >= hwFaces) && (0 >= swFaces)) {
+ Return<void> ret = device1->close();
+ ASSERT_TRUE(ret.isOk());
+ continue;
+ }
+
+ sp<BufferItemConsumer> bufferItemConsumer;
+ sp<BufferItemHander> bufferHandler;
+ setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
+ &bufferHandler /*out*/);
+ startPreview(device1);
+
+ if (0 < hwFaces) {
+ Return<Status> returnStatus = device1->sendCommand(
+ CommandType::START_FACE_DETECTION,
+ CAMERA_FACE_DETECTION_HW, 0);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+ // TODO(epeev) : Enable and check for face notifications
+ returnStatus = device1->sendCommand(
+ CommandType::STOP_FACE_DETECTION,
+ CAMERA_FACE_DETECTION_HW, 0);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+ }
+
+ if (0 < swFaces) {
+ Return<Status> returnStatus = device1->sendCommand(
+ CommandType::START_FACE_DETECTION,
+ CAMERA_FACE_DETECTION_SW, 0);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+ // TODO(epeev) : Enable and check for face notifications
+ returnStatus = device1->sendCommand(
+ CommandType::STOP_FACE_DETECTION,
+ CAMERA_FACE_DETECTION_SW, 0);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+ }
+
+ stopPreviewAndClose(device1);
}
-
- sp<BufferItemConsumer> bufferItemConsumer;
- sp<BufferItemHander> bufferHandler;
- setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
- &bufferHandler /*out*/);
- startPreview(device1);
-
- if (0 < hwFaces) {
- Return<Status> returnStatus = device1->sendCommand(
- CommandType::START_FACE_DETECTION,
- CAMERA_FACE_DETECTION_HW, 0);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
- // TODO(epeev) : Enable and check for face notifications
- returnStatus = device1->sendCommand(
- CommandType::STOP_FACE_DETECTION,
- CAMERA_FACE_DETECTION_HW, 0);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
- }
-
- if (0 < swFaces) {
- Return<Status> returnStatus = device1->sendCommand(
- CommandType::START_FACE_DETECTION,
- CAMERA_FACE_DETECTION_SW, 0);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
- // TODO(epeev) : Enable and check for face notifications
- returnStatus = device1->sendCommand(
- CommandType::STOP_FACE_DETECTION,
- CAMERA_FACE_DETECTION_SW, 0);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
- }
-
- stopPreviewAndClose(device1);
}
}
}
// Check whether smooth zoom is available and try to enable & disable it.
TEST_F(CameraHidlTest, sendCommandSmoothZoom) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
- ::android::CameraParameters cameraParams;
- getParameters(device1, &cameraParams /*out*/);
+ ::android::CameraParameters cameraParams;
+ getParameters(device1, &cameraParams /*out*/);
- const char *smoothZoomStr = cameraParams.get(
- CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED);
- bool smoothZoomSupported = ((nullptr != smoothZoomStr) &&
- (strcmp(smoothZoomStr, CameraParameters::TRUE) == 0)) ?
- true : false;
- if (!smoothZoomSupported) {
- Return<void> ret = device1->close();
- ASSERT_TRUE(ret.isOk());
- continue;
+ const char *smoothZoomStr = cameraParams.get(
+ CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED);
+ bool smoothZoomSupported = ((nullptr != smoothZoomStr) &&
+ (strcmp(smoothZoomStr, CameraParameters::TRUE) == 0)) ?
+ true : false;
+ if (!smoothZoomSupported) {
+ Return<void> ret = device1->close();
+ ASSERT_TRUE(ret.isOk());
+ continue;
+ }
+
+ int32_t maxZoom = cameraParams.getInt(
+ CameraParameters::KEY_MAX_ZOOM);
+ ASSERT_TRUE(0 < maxZoom);
+
+ sp<BufferItemConsumer> bufferItemConsumer;
+ sp<BufferItemHander> bufferHandler;
+ setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
+ &bufferHandler /*out*/);
+ startPreview(device1);
+ setParameters(device1, cameraParams);
+
+ Return<Status> returnStatus = device1->sendCommand(
+ CommandType::START_SMOOTH_ZOOM, maxZoom, 0);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+                // TODO(epeev) : Enable and check for zoom notifications
+ returnStatus = device1->sendCommand(
+ CommandType::STOP_SMOOTH_ZOOM, 0, 0);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+
+ stopPreviewAndClose(device1);
}
-
- int32_t maxZoom = cameraParams.getInt(
- CameraParameters::KEY_MAX_ZOOM);
- ASSERT_TRUE(0 < maxZoom);
-
- sp<BufferItemConsumer> bufferItemConsumer;
- sp<BufferItemHander> bufferHandler;
- setupPreviewWindow(device1, &bufferItemConsumer /*out*/,
- &bufferHandler /*out*/);
- startPreview(device1);
- setParameters(device1, cameraParams);
-
- Return<Status> returnStatus = device1->sendCommand(
- CommandType::START_SMOOTH_ZOOM, maxZoom, 0);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
- // TODO(epeev) : Enable and check for face notifications
- returnStatus = device1->sendCommand(CommandType::STOP_SMOOTH_ZOOM,
- 0, 0);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
-
- stopPreviewAndClose(device1);
}
}
}
// Basic sanity tests related to camera parameters.
TEST_F(CameraHidlTest, getSetParameters) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
- ::android::CameraParameters cameraParams;
- getParameters(device1, &cameraParams /*out*/);
+ ::android::CameraParameters cameraParams;
+ getParameters(device1, &cameraParams /*out*/);
- int32_t width, height;
- cameraParams.getPictureSize(&width, &height);
- ASSERT_TRUE((0 < width) && (0 < height));
- cameraParams.getPreviewSize(&width, &height);
- ASSERT_TRUE((0 < width) && (0 < height));
- int32_t minFps, maxFps;
- cameraParams.getPreviewFpsRange(&minFps, &maxFps);
- ASSERT_TRUE((0 < minFps) && (0 < maxFps));
- ASSERT_NE(nullptr, cameraParams.getPreviewFormat());
- ASSERT_NE(nullptr, cameraParams.getPictureFormat());
- ASSERT_TRUE(strcmp(CameraParameters::PIXEL_FORMAT_JPEG,
- cameraParams.getPictureFormat()) == 0);
+ int32_t width, height;
+ cameraParams.getPictureSize(&width, &height);
+ ASSERT_TRUE((0 < width) && (0 < height));
+ cameraParams.getPreviewSize(&width, &height);
+ ASSERT_TRUE((0 < width) && (0 < height));
+ int32_t minFps, maxFps;
+ cameraParams.getPreviewFpsRange(&minFps, &maxFps);
+ ASSERT_TRUE((0 < minFps) && (0 < maxFps));
+ ASSERT_NE(nullptr, cameraParams.getPreviewFormat());
+ ASSERT_NE(nullptr, cameraParams.getPictureFormat());
+ ASSERT_TRUE(strcmp(CameraParameters::PIXEL_FORMAT_JPEG,
+ cameraParams.getPictureFormat()) == 0);
- const char *flashMode = cameraParams.get(
- CameraParameters::KEY_FLASH_MODE);
- ASSERT_TRUE((nullptr == flashMode) || (strcmp(
- CameraParameters::FLASH_MODE_OFF, flashMode) == 0));
+ const char *flashMode = cameraParams.get(
+ CameraParameters::KEY_FLASH_MODE);
+ ASSERT_TRUE((nullptr == flashMode) || (strcmp(
+ CameraParameters::FLASH_MODE_OFF, flashMode) == 0));
- const char *wbMode = cameraParams.get(
- CameraParameters::KEY_WHITE_BALANCE);
- ASSERT_TRUE((nullptr == wbMode) || (strcmp(
- CameraParameters::WHITE_BALANCE_AUTO, wbMode) == 0));
+ const char *wbMode = cameraParams.get(
+ CameraParameters::KEY_WHITE_BALANCE);
+ ASSERT_TRUE((nullptr == wbMode) || (strcmp(
+ CameraParameters::WHITE_BALANCE_AUTO, wbMode) == 0));
- const char *effect = cameraParams.get(CameraParameters::KEY_EFFECT);
- ASSERT_TRUE((nullptr == effect) || (strcmp(
- CameraParameters::EFFECT_NONE, effect) == 0));
+ const char *effect = cameraParams.get(
+ CameraParameters::KEY_EFFECT);
+ ASSERT_TRUE((nullptr == effect) || (strcmp(
+ CameraParameters::EFFECT_NONE, effect) == 0));
- ::android::Vector<::android::Size> previewSizes;
- cameraParams.getSupportedPreviewSizes(previewSizes);
- ASSERT_FALSE(previewSizes.empty());
- ::android::Vector<::android::Size> pictureSizes;
- cameraParams.getSupportedPictureSizes(pictureSizes);
- ASSERT_FALSE(pictureSizes.empty());
- const char *previewFormats = cameraParams.get(
- CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
- ASSERT_NE(nullptr, previewFormats);
- ::android::String8 previewFormatsString(previewFormats);
- ASSERT_TRUE(previewFormatsString.contains(
- CameraParameters::PIXEL_FORMAT_YUV420SP));
- ASSERT_NE(nullptr, cameraParams.get(
- CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS));
- ASSERT_NE(nullptr, cameraParams.get(
- CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES));
- const char *focusModes = cameraParams.get(
- CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
- ASSERT_NE(nullptr, focusModes);
- ::android::String8 focusModesString(focusModes);
- const char *focusMode = cameraParams.get(
- CameraParameters::KEY_FOCUS_MODE);
- ASSERT_NE(nullptr, focusMode);
- // Auto focus mode should be default
- if (focusModesString.contains(CameraParameters::FOCUS_MODE_AUTO)) {
- ASSERT_TRUE(strcmp(
- CameraParameters::FOCUS_MODE_AUTO, focusMode) == 0);
+ ::android::Vector<::android::Size> previewSizes;
+ cameraParams.getSupportedPreviewSizes(previewSizes);
+ ASSERT_FALSE(previewSizes.empty());
+ ::android::Vector<::android::Size> pictureSizes;
+ cameraParams.getSupportedPictureSizes(pictureSizes);
+ ASSERT_FALSE(pictureSizes.empty());
+ const char *previewFormats = cameraParams.get(
+ CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
+ ASSERT_NE(nullptr, previewFormats);
+ ::android::String8 previewFormatsString(previewFormats);
+ ASSERT_TRUE(previewFormatsString.contains(
+ CameraParameters::PIXEL_FORMAT_YUV420SP));
+ ASSERT_NE(nullptr, cameraParams.get(
+ CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS));
+ ASSERT_NE(nullptr, cameraParams.get(
+ CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES));
+ const char *focusModes = cameraParams.get(
+ CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
+ ASSERT_NE(nullptr, focusModes);
+ ::android::String8 focusModesString(focusModes);
+ const char *focusMode = cameraParams.get(
+ CameraParameters::KEY_FOCUS_MODE);
+ ASSERT_NE(nullptr, focusMode);
+ // Auto focus mode should be default
+ if (focusModesString.contains(
+ CameraParameters::FOCUS_MODE_AUTO)) {
+ ASSERT_TRUE(strcmp(
+ CameraParameters::FOCUS_MODE_AUTO, focusMode) == 0);
+ }
+ ASSERT_TRUE(0 < cameraParams.getInt(
+ CameraParameters::KEY_FOCAL_LENGTH));
+ int32_t horizontalViewAngle = cameraParams.getInt(
+ CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE);
+ ASSERT_TRUE((0 < horizontalViewAngle) &&
+ (360 >= horizontalViewAngle));
+ int32_t verticalViewAngle = cameraParams.getInt(
+ CameraParameters::KEY_VERTICAL_VIEW_ANGLE);
+ ASSERT_TRUE((0 < verticalViewAngle) &&
+ (360 >= verticalViewAngle));
+ int32_t jpegQuality = cameraParams.getInt(
+ CameraParameters::KEY_JPEG_QUALITY);
+ ASSERT_TRUE((1 <= jpegQuality) && (100 >= jpegQuality));
+ int32_t jpegThumbQuality = cameraParams.getInt(
+ CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ ASSERT_TRUE((1 <= jpegThumbQuality) &&
+ (100 >= jpegThumbQuality));
+
+ cameraParams.setPictureSize(pictureSizes[0].width,
+ pictureSizes[0].height);
+ cameraParams.setPreviewSize(previewSizes[0].width,
+ previewSizes[0].height);
+
+ setParameters(device1, cameraParams);
+ getParameters(device1, &cameraParams /*out*/);
+
+ cameraParams.getPictureSize(&width, &height);
+ ASSERT_TRUE((pictureSizes[0].width == width) &&
+ (pictureSizes[0].height == height));
+ cameraParams.getPreviewSize(&width, &height);
+ ASSERT_TRUE((previewSizes[0].width == width) &&
+ (previewSizes[0].height == height));
+
+ Return<void> ret = device1->close();
+ ASSERT_TRUE(ret.isOk());
}
- ASSERT_TRUE(0 < cameraParams.getInt(
- CameraParameters::KEY_FOCAL_LENGTH));
- int32_t horizontalViewAngle = cameraParams.getInt(
- CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE);
- ASSERT_TRUE((0 < horizontalViewAngle) && (360 >= horizontalViewAngle));
- int32_t verticalViewAngle = cameraParams.getInt(
- CameraParameters::KEY_VERTICAL_VIEW_ANGLE);
- ASSERT_TRUE((0 < verticalViewAngle) && (360 >= verticalViewAngle));
- int32_t jpegQuality = cameraParams.getInt(
- CameraParameters::KEY_JPEG_QUALITY);
- ASSERT_TRUE((1 <= jpegQuality) && (100 >= jpegQuality));
- int32_t jpegThumbQuality = cameraParams.getInt(
- CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
- ASSERT_TRUE((1 <= jpegThumbQuality) && (100 >= jpegThumbQuality));
-
- cameraParams.setPictureSize(pictureSizes[0].width,
- pictureSizes[0].height);
- cameraParams.setPreviewSize(previewSizes[0].width,
- previewSizes[0].height);
-
- setParameters(device1, cameraParams);
- getParameters(device1, &cameraParams /*out*/);
-
- cameraParams.getPictureSize(&width, &height);
- ASSERT_TRUE((pictureSizes[0].width == width) &&
- (pictureSizes[0].height == height));
- cameraParams.getPreviewSize(&width, &height);
- ASSERT_TRUE((previewSizes[0].width == width) &&
- (previewSizes[0].height == height));
-
- Return<void> ret = device1->close();
- ASSERT_TRUE(ret.isOk());
}
}
}
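getSetParameters exercises the getParameters()/setParameters() helpers used throughout the 1.0 tests; a sketch of the assumed round trip through the HIDL string interface (the flatten/unflatten pair mirrors how the framework consumes CameraParameters):

    void CameraHidlTest::getParameters(
            const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device,
            ::android::CameraParameters *cameraParams /*out*/) {
        ASSERT_NE(nullptr, cameraParams);
        Return<void> ret = device->getParameters(
            [&](const ::android::hardware::hidl_string &params) {
                ASSERT_FALSE(params.empty());
                ::android::String8 paramString(params.c_str());
                cameraParams->unflatten(paramString);
            });
        ASSERT_TRUE(ret.isOk());
    }

    void CameraHidlTest::setParameters(
            const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device,
            const ::android::CameraParameters &cameraParams) {
        Return<Status> returnStatus = device->setParameters(
                cameraParams.flatten().string());
        ASSERT_TRUE(returnStatus.isOk());
        ASSERT_EQ(Status::OK, returnStatus);
    }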
@@ -1656,39 +2026,50 @@
// Verify that the static camera characteristics can be retrieved
// successfully.
TEST_F(CameraHidlTest, getCameraCharacteristics) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_2;
- ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
- Return<void> ret;
- ret = env->mProvider->getCameraDeviceInterface_V3_x(
- name,
- [&](auto status, const auto& device) {
- ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device, nullptr);
- device3_2 = device;
- });
- ASSERT_TRUE(ret.isOk());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_2;
+ ALOGI("getCameraCharacteristics: Testing camera device %s",
+ name.c_str());
+ Return<void> ret;
+ ret = provider.second->getCameraDeviceInterface_V3_x(
+ name,
+ [&](auto status, const auto& device) {
+ ALOGI("getCameraDeviceInterface_V3_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device, nullptr);
+ device3_2 = device;
+ });
+ ASSERT_TRUE(ret.isOk());
- ret = device3_2->getCameraCharacteristics(
- [&](auto status, const auto& chars) {
- ALOGI("getCameraCharacteristics returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- const camera_metadata_t* metadata = (camera_metadata_t*) chars.data();
- size_t expectedSize = chars.size();
- int result = validate_camera_metadata_structure(metadata, &expectedSize);
- ASSERT_TRUE(result == 0 || result == CAMERA_METADATA_VALIDATION_SHIFTED);
- size_t entryCount = get_camera_metadata_entry_count(metadata);
- // TODO: we can do better than 0 here. Need to check how many required
- // characteristics keys we've defined.
- ASSERT_GT(entryCount, 0u);
- ALOGI("getCameraCharacteristics metadata entry count is %zu", entryCount);
- });
- ASSERT_TRUE(ret.isOk());
+ ret = device3_2->getCameraCharacteristics(
+ [&](auto status, const auto& chars) {
+ ALOGI("getCameraCharacteristics returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ const camera_metadata_t* metadata =
+ (camera_metadata_t*) chars.data();
+ size_t expectedSize = chars.size();
+ int result = validate_camera_metadata_structure(
+ metadata, &expectedSize);
+ ASSERT_TRUE((result == 0) ||
+ (result == CAMERA_METADATA_VALIDATION_SHIFTED));
+ size_t entryCount = get_camera_metadata_entry_count(
+ metadata);
+ // TODO: we can do better than 0 here. Need to check how many required
+ // characteristics keys we've defined.
+ ASSERT_GT(entryCount, 0u);
+ ALOGI("getCameraCharacteristics metadata entry count is %zu",
+ entryCount);
+ });
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
@@ -1696,252 +2077,273 @@
// In case it is supported, verify that torch can be enabled.
// Check for corresponding torch callbacks as well.
TEST_F(CameraHidlTest, setTorchMode) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- bool torchControlSupported = false;
- Return<void> ret;
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ bool torchControlSupported = false;
+ Return<void> ret;
- ret = CameraHidlEnvironment::Instance()->mProvider->isSetTorchModeSupported(
- [&](auto status, bool support) {
- ALOGI("isSetTorchModeSupported returns status:%d supported:%d",
- (int)status, support);
- ASSERT_EQ(Status::OK, status);
- torchControlSupported = support;
- });
+ ret = provider.second->isSetTorchModeSupported(
+ [&](auto status, bool support) {
+ ALOGI("isSetTorchModeSupported returns status:%d supported:%d",
+ (int)status, support);
+ ASSERT_EQ(Status::OK, status);
+ torchControlSupported = support;
+ });
- sp<TorchProviderCb> cb = new TorchProviderCb(this);
- Return<Status> returnStatus = env->mProvider->setCallback(cb);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
+ sp<TorchProviderCb> cb = new TorchProviderCb(this);
+ Return<Status> returnStatus = provider.second->setCallback(cb);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_2;
- ALOGI("setTorchMode: Testing camera device %s", name.c_str());
- ret = env->mProvider->getCameraDeviceInterface_V3_x(
- name,
- [&](auto status, const auto& device) {
- ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device, nullptr);
- device3_2 = device;
- });
- ASSERT_TRUE(ret.isOk());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_2;
+ ALOGI("setTorchMode: Testing camera device %s", name.c_str());
+ ret = provider.second->getCameraDeviceInterface_V3_x(
+ name,
+ [&](auto status, const auto& device) {
+ ALOGI("getCameraDeviceInterface_V3_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device, nullptr);
+ device3_2 = device;
+ });
+ ASSERT_TRUE(ret.isOk());
- mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
- returnStatus = device3_2->setTorchMode(TorchMode::ON);
- ASSERT_TRUE(returnStatus.isOk());
- if (!torchControlSupported) {
- ASSERT_EQ(Status::METHOD_NOT_SUPPORTED, returnStatus);
- } else {
- ASSERT_TRUE(returnStatus == Status::OK ||
- returnStatus == Status::OPERATION_NOT_SUPPORTED);
- if (returnStatus == Status::OK) {
- {
- std::unique_lock<std::mutex> l(mTorchLock);
- while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::seconds(kTorchTimeoutSec);
- ASSERT_NE(std::cv_status::timeout,
- mTorchCond.wait_until(l, timeout));
+ mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
+ returnStatus = device3_2->setTorchMode(TorchMode::ON);
+ ASSERT_TRUE(returnStatus.isOk());
+ if (!torchControlSupported) {
+ ASSERT_EQ(Status::METHOD_NOT_SUPPORTED, returnStatus);
+ } else {
+ ASSERT_TRUE(returnStatus == Status::OK ||
+ returnStatus == Status::OPERATION_NOT_SUPPORTED);
+ if (returnStatus == Status::OK) {
+ {
+ std::unique_lock<std::mutex> l(mTorchLock);
+ while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::seconds(kTorchTimeoutSec);
+ ASSERT_NE(std::cv_status::timeout,
+ mTorchCond.wait_until(l, timeout));
+ }
+ ASSERT_EQ(TorchModeStatus::AVAILABLE_ON,
+ mTorchStatus);
+ mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
}
- ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
- mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
- }
- returnStatus = device3_2->setTorchMode(TorchMode::OFF);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
+ returnStatus = device3_2->setTorchMode(TorchMode::OFF);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
- {
- std::unique_lock<std::mutex> l(mTorchLock);
- while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::seconds(kTorchTimeoutSec);
- ASSERT_NE(std::cv_status::timeout,
- mTorchCond.wait_until(l, timeout));
+ {
+ std::unique_lock<std::mutex> l(mTorchLock);
+ while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::seconds(kTorchTimeoutSec);
+ ASSERT_NE(std::cv_status::timeout,
+ mTorchCond.wait_until(l, timeout));
+ }
+ ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF,
+ mTorchStatus);
}
- ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
}
}
- }
- } else if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- ALOGI("dumpState: Testing camera device %s", name.c_str());
- ret = env->mProvider->getCameraDeviceInterface_V1_x(
- name,
- [&](auto status, const auto& device) {
- ALOGI("getCameraDeviceInterface_V1_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device, nullptr);
- device1 = device;
- });
- ASSERT_TRUE(ret.isOk());
+ } else if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+            ALOGI("setTorchMode: Testing camera device %s", name.c_str());
+ ret = provider.second->getCameraDeviceInterface_V1_x(
+ name,
+ [&](auto status, const auto& device) {
+ ALOGI("getCameraDeviceInterface_V1_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device, nullptr);
+ device1 = device;
+ });
+ ASSERT_TRUE(ret.isOk());
- mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
- returnStatus = device1->setTorchMode(TorchMode::ON);
- ASSERT_TRUE(returnStatus.isOk());
- if (!torchControlSupported) {
- ASSERT_EQ(Status::METHOD_NOT_SUPPORTED, returnStatus);
- } else {
- ASSERT_TRUE(returnStatus == Status::OK ||
- returnStatus == Status::OPERATION_NOT_SUPPORTED);
- if (returnStatus == Status::OK) {
- {
- std::unique_lock<std::mutex> l(mTorchLock);
- while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::seconds(kTorchTimeoutSec);
- ASSERT_NE(std::cv_status::timeout,
- mTorchCond.wait_until(l, timeout));
+ mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
+ returnStatus = device1->setTorchMode(TorchMode::ON);
+ ASSERT_TRUE(returnStatus.isOk());
+ if (!torchControlSupported) {
+ ASSERT_EQ(Status::METHOD_NOT_SUPPORTED, returnStatus);
+ } else {
+ ASSERT_TRUE(returnStatus == Status::OK ||
+ returnStatus == Status::OPERATION_NOT_SUPPORTED);
+ if (returnStatus == Status::OK) {
+ {
+ std::unique_lock<std::mutex> l(mTorchLock);
+ while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::seconds(kTorchTimeoutSec);
+ ASSERT_NE(std::cv_status::timeout,
+ mTorchCond.wait_until(l, timeout));
+ }
+ ASSERT_EQ(TorchModeStatus::AVAILABLE_ON,
+ mTorchStatus);
+ mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
}
- ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus);
- mTorchStatus = TorchModeStatus::NOT_AVAILABLE;
- }
- returnStatus = device1->setTorchMode(TorchMode::OFF);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
+ returnStatus = device1->setTorchMode(TorchMode::OFF);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
- {
- std::unique_lock<std::mutex> l(mTorchLock);
- while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::seconds(kTorchTimeoutSec);
- ASSERT_NE(std::cv_status::timeout,
- mTorchCond.wait_until(l, timeout));
+ {
+ std::unique_lock<std::mutex> l(mTorchLock);
+ while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) {
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::seconds(kTorchTimeoutSec);
+ ASSERT_NE(std::cv_status::timeout,
+ mTorchCond.wait_until(l, timeout));
+ }
+ ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF,
+ mTorchStatus);
}
- ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus);
}
}
+ ret = device1->close();
+ ASSERT_TRUE(ret.isOk());
}
- ret = device1->close();
- ASSERT_TRUE(ret.isOk());
}
- }
- returnStatus = env->mProvider->setCallback(nullptr);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
+ returnStatus = provider.second->setCallback(nullptr);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+ }
}
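setTorchMode registers a TorchProviderCb on every provider; a sketch of that callback, assumed to be nested in CameraHidlTest so it can reach the torch members referenced above:

    struct TorchProviderCb : public ICameraProviderCallback {
        TorchProviderCb(CameraHidlTest *parent) : mParent(parent) {}

        Return<void> cameraDeviceStatusChange(
                const hidl_string &, CameraDeviceStatus) override {
            return Void();
        }

        Return<void> torchModeStatusChange(
                const hidl_string &, TorchModeStatus newStatus) override {
            // Record the new torch state and wake any test thread blocked
            // on mTorchCond.
            std::lock_guard<std::mutex> l(mParent->mTorchLock);
            mParent->mTorchStatus = newStatus;
            mParent->mTorchCond.notify_one();
            return Void();
        }

        CameraHidlTest *mParent;
    };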
// Check dump functionality.
TEST_F(CameraHidlTest, dumpState) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- Return<void> ret;
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ Return<void> ret;
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_2;
- ALOGI("dumpState: Testing camera device %s", name.c_str());
- ret = env->mProvider->getCameraDeviceInterface_V3_x(
- name,
- [&](auto status, const auto& device) {
- ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device, nullptr);
- device3_2 = device;
- });
- ASSERT_TRUE(ret.isOk());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ ::android::sp<ICameraDevice> device3_2;
+ ALOGI("dumpState: Testing camera device %s", name.c_str());
+ ret = provider.second->getCameraDeviceInterface_V3_x(
+ name,
+ [&](auto status, const auto& device) {
+ ALOGI("getCameraDeviceInterface_V3_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device, nullptr);
+ device3_2 = device;
+ });
+ ASSERT_TRUE(ret.isOk());
- native_handle_t* raw_handle = native_handle_create(1, 0);
- raw_handle->data[0] = open(kDumpOutput, O_RDWR);
- ASSERT_GE(raw_handle->data[0], 0);
- hidl_handle handle = raw_handle;
- ret= device3_2->dumpState(handle);
- ASSERT_TRUE(ret.isOk());
- close(raw_handle->data[0]);
- native_handle_delete(raw_handle);
- } else if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- ALOGI("dumpState: Testing camera device %s", name.c_str());
- ret = env->mProvider->getCameraDeviceInterface_V1_x(
- name,
- [&](auto status, const auto& device) {
- ALOGI("getCameraDeviceInterface_V1_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device, nullptr);
- device1 = device;
- });
- ASSERT_TRUE(ret.isOk());
+ native_handle_t* raw_handle = native_handle_create(1, 0);
+ raw_handle->data[0] = open(kDumpOutput, O_RDWR);
+ ASSERT_GE(raw_handle->data[0], 0);
+ hidl_handle handle = raw_handle;
+ ret = device3_2->dumpState(handle);
+ ASSERT_TRUE(ret.isOk());
+ close(raw_handle->data[0]);
+ native_handle_delete(raw_handle);
+ } else if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ ALOGI("dumpState: Testing camera device %s", name.c_str());
+ ret = provider.second->getCameraDeviceInterface_V1_x(
+ name,
+ [&](auto status, const auto& device) {
+ ALOGI("getCameraDeviceInterface_V1_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device, nullptr);
+ device1 = device;
+ });
+ ASSERT_TRUE(ret.isOk());
- native_handle_t* raw_handle = native_handle_create(1, 0);
- raw_handle->data[0] = open(kDumpOutput, O_RDWR);
- ASSERT_GE(raw_handle->data[0], 0);
- hidl_handle handle = raw_handle;
- Return<Status> returnStatus = device1->dumpState(handle);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
- close(raw_handle->data[0]);
- native_handle_delete(raw_handle);
+ native_handle_t* raw_handle = native_handle_create(1, 0);
+ raw_handle->data[0] = open(kDumpOutput, O_RDWR);
+ ASSERT_GE(raw_handle->data[0], 0);
+ hidl_handle handle = raw_handle;
+ Return<Status> returnStatus = device1->dumpState(handle);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+ close(raw_handle->data[0]);
+ native_handle_delete(raw_handle);
+ }
}
}
}
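dumpState() takes a hidl_handle wrapping a native_handle_t that carries the output file descriptor, and the test stays responsible for closing that fd and deleting the handle afterwards, exactly as the blocks above do. A small helper capturing just the handle setup — assuming the libcutils native_handle API and a writable kDumpOutput-style path — might be:

#include <fcntl.h>
#include <unistd.h>
#include <cutils/native_handle.h>

// Builds a one-fd native handle pointing at `path`, or returns nullptr on
// failure. The caller must later close(h->data[0]) and native_handle_delete(h).
native_handle_t* makeDumpHandle(const char* path) {
    native_handle_t* h = native_handle_create(1 /*numFds*/, 0 /*numInts*/);
    if (h == nullptr) {
        return nullptr;
    }
    h->data[0] = open(path, O_RDWR);
    if (h->data[0] < 0) {
        native_handle_delete(h);
        return nullptr;
    }
    return h;
}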
// Open, dumpStates, then close
TEST_F(CameraHidlTest, openClose) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- Return<void> ret;
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ Return<void> ret;
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_2;
- ALOGI("openClose: Testing camera device %s", name.c_str());
- ret = env->mProvider->getCameraDeviceInterface_V3_x(
- name,
- [&](auto status, const auto& device) {
- ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device, nullptr);
- device3_2 = device;
- });
- ASSERT_TRUE(ret.isOk());
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_2;
+ ALOGI("openClose: Testing camera device %s", name.c_str());
+ ret = provider.second->getCameraDeviceInterface_V3_x(
+ name,
+ [&](auto status, const auto& device) {
+ ALOGI("getCameraDeviceInterface_V3_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device, nullptr);
+ device3_2 = device;
+ });
+ ASSERT_TRUE(ret.isOk());
- sp<EmptyDeviceCb> cb = new EmptyDeviceCb;
- sp<ICameraDeviceSession> session;
- ret = device3_2->open(
- cb,
- [&](auto status, const auto& newSession) {
- ALOGI("device::open returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(newSession, nullptr);
- session = newSession;
- });
- ASSERT_TRUE(ret.isOk());
+ sp<EmptyDeviceCb> cb = new EmptyDeviceCb;
+ sp<ICameraDeviceSession> session;
+ ret = device3_2->open(
+ cb,
+ [&](auto status, const auto& newSession) {
+ ALOGI("device::open returns status:%d", (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(newSession, nullptr);
+ session = newSession;
+ });
+ ASSERT_TRUE(ret.isOk());
- native_handle_t* raw_handle = native_handle_create(1, 0);
- raw_handle->data[0] = open(kDumpOutput, O_RDWR);
- ASSERT_GE(raw_handle->data[0], 0);
- hidl_handle handle = raw_handle;
- ret = device3_2->dumpState(handle);
- ASSERT_TRUE(ret.isOk());
- close(raw_handle->data[0]);
- native_handle_delete(raw_handle);
+ native_handle_t* raw_handle = native_handle_create(1, 0);
+ raw_handle->data[0] = open(kDumpOutput, O_RDWR);
+ ASSERT_GE(raw_handle->data[0], 0);
+ hidl_handle handle = raw_handle;
+ ret = device3_2->dumpState(handle);
+ ASSERT_TRUE(ret.isOk());
+ close(raw_handle->data[0]);
+ native_handle_delete(raw_handle);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
- // TODO: test all session API calls return INTERNAL_ERROR after close
- // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
- } else if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_1_0) {
- sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
- openCameraDevice(name, env, &device1 /*out*/);
- ASSERT_NE(nullptr, device1.get());
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ // TODO: test all session API calls return INTERNAL_ERROR after close
+ // TODO: keep a wp copy here and verify session cannot be promoted out of this scope
+ } else if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_1_0) {
+ sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+ openCameraDevice(name, provider.second, &device1 /*out*/);
+ ASSERT_NE(nullptr, device1.get());
- native_handle_t* raw_handle = native_handle_create(1, 0);
- raw_handle->data[0] = open(kDumpOutput, O_RDWR);
- ASSERT_GE(raw_handle->data[0], 0);
- hidl_handle handle = raw_handle;
- Return<Status> returnStatus = device1->dumpState(handle);
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
- close(raw_handle->data[0]);
- native_handle_delete(raw_handle);
+ native_handle_t* raw_handle = native_handle_create(1, 0);
+ raw_handle->data[0] = open(kDumpOutput, O_RDWR);
+ ASSERT_GE(raw_handle->data[0], 0);
+ hidl_handle handle = raw_handle;
+ Return<Status> returnStatus = device1->dumpState(handle);
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+ close(raw_handle->data[0]);
+ native_handle_delete(raw_handle);
- ret = device1->close();
- ASSERT_TRUE(ret.isOk());
+ ret = device1->close();
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
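Every test in this change follows the same mechanical rewrite: instead of reading a single env->mProvider, it walks CameraHidlEnvironment::Instance()->mProviders, where each entry pairs a provider instance name with its ICameraProvider interface. A compilable stand-in for that loop — the container and interface types are assumptions here; the real declarations live earlier in this test file — is just:

// Generic sketch of the per-provider iteration applied throughout this patch.
// `providers` stands in for CameraHidlEnvironment::mProviders; `fn` is the
// body of an individual test case.
template <typename ProviderMap, typename PerProviderFn>
void forEachProvider(const ProviderMap& providers, PerProviderFn&& fn) {
    for (const auto& provider : providers) {
        // provider.first  -> instance name, fed to getCameraDeviceVersion()
        // provider.second -> provider interface, fed to getCameraDeviceNames()
        fn(provider.first, provider.second);
    }
}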
@@ -1949,71 +2351,81 @@
// Check whether all common default request settings can be successfully
// constructed.
TEST_F(CameraHidlTest, constructDefaultRequestSettings) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_2;
- Return<void> ret;
- ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str());
- ret = env->mProvider->getCameraDeviceInterface_V3_x(
- name,
- [&](auto status, const auto& device) {
- ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device, nullptr);
- device3_2 = device;
- });
- ASSERT_TRUE(ret.isOk());
-
- sp<EmptyDeviceCb> cb = new EmptyDeviceCb;
- sp<ICameraDeviceSession> session;
- ret = device3_2->open(
- cb,
- [&](auto status, const auto& newSession) {
- ALOGI("device::open returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(newSession, nullptr);
- session = newSession;
- });
- ASSERT_TRUE(ret.isOk());
-
- for (uint32_t t = (uint32_t) RequestTemplate::PREVIEW;
- t <= (uint32_t) RequestTemplate::MANUAL; t++) {
- RequestTemplate reqTemplate = (RequestTemplate) t;
- ret = session->constructDefaultRequestSettings(
- reqTemplate,
- [&](auto status, const auto& req) {
- ALOGI("constructDefaultRequestSettings returns status:%d", (int)status);
- if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
- reqTemplate == RequestTemplate::MANUAL) {
- // optional templates
- ASSERT_TRUE(status == Status::OK || status == Status::ILLEGAL_ARGUMENT);
- } else {
- ASSERT_EQ(Status::OK, status);
- }
-
- if (status == Status::OK) {
- const camera_metadata_t* metadata =
- (camera_metadata_t*) req.data();
- size_t expectedSize = req.size();
- int result = validate_camera_metadata_structure(
- metadata, &expectedSize);
- ASSERT_TRUE(result == 0 || result == CAMERA_METADATA_VALIDATION_SHIFTED);
- size_t entryCount = get_camera_metadata_entry_count(metadata);
- // TODO: we can do better than 0 here. Need to check how many required
- // request keys we've defined for each template
- ASSERT_GT(entryCount, 0u);
- ALOGI("template %u metadata entry count is %zu", t, entryCount);
- } else {
- ASSERT_EQ(0u, req.size());
- }
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_2;
+ Return<void> ret;
+ ALOGI("constructDefaultRequestSettings: Testing camera device %s",
+ name.c_str());
+ ret = provider.second->getCameraDeviceInterface_V3_x(
+ name,
+ [&](auto status, const auto& device) {
+ ALOGI("getCameraDeviceInterface_V3_x returns status:%d",
+ (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(device, nullptr);
+ device3_2 = device;
});
ASSERT_TRUE(ret.isOk());
+
+ sp<EmptyDeviceCb> cb = new EmptyDeviceCb;
+ sp<ICameraDeviceSession> session;
+ ret = device3_2->open(
+ cb,
+ [&](auto status, const auto& newSession) {
+ ALOGI("device::open returns status:%d", (int)status);
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_NE(newSession, nullptr);
+ session = newSession;
+ });
+ ASSERT_TRUE(ret.isOk());
+
+ for (uint32_t t = (uint32_t) RequestTemplate::PREVIEW;
+ t <= (uint32_t) RequestTemplate::MANUAL; t++) {
+ RequestTemplate reqTemplate = (RequestTemplate) t;
+ ret = session->constructDefaultRequestSettings(
+ reqTemplate,
+ [&](auto status, const auto& req) {
+ ALOGI("constructDefaultRequestSettings returns status:%d",
+ (int)status);
+ if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG ||
+ reqTemplate == RequestTemplate::MANUAL) {
+ // optional templates
+ ASSERT_TRUE((status == Status::OK) ||
+ (status == Status::ILLEGAL_ARGUMENT));
+ } else {
+ ASSERT_EQ(Status::OK, status);
+ }
+
+ if (status == Status::OK) {
+ const camera_metadata_t* metadata =
+ (camera_metadata_t*) req.data();
+ size_t expectedSize = req.size();
+ int result = validate_camera_metadata_structure(
+ metadata, &expectedSize);
+ ASSERT_TRUE((result == 0) ||
+ (result == CAMERA_METADATA_VALIDATION_SHIFTED));
+ size_t entryCount =
+ get_camera_metadata_entry_count(metadata);
+ // TODO: we can do better than 0 here. Need to check how many required
+ // request keys we've defined for each template
+ ASSERT_GT(entryCount, 0u);
+ ALOGI("template %u metadata entry count is %zu",
+ t, entryCount);
+ } else {
+ ASSERT_EQ(0u, req.size());
+ }
+ });
+ ASSERT_TRUE(ret.isOk());
+ }
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
}
}
}
@@ -2021,105 +2433,98 @@
// Verify that all supported stream formats and sizes can be configured
// successfully.
TEST_F(CameraHidlTest, configureStreamsAvailableOutputs) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- std::vector<AvailableStream> outputStreams;
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ std::vector<AvailableStream> outputStreams;
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- camera_metadata_t *staticMeta;
- Return<void> ret;
- sp<ICameraDeviceSession> session;
- openEmptyDeviceSession(name, env, &session /*out*/,
- &staticMeta /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ camera_metadata_t *staticMeta;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ openEmptyDeviceSession(name, provider.second, &session /*out*/,
+ &staticMeta /*out*/);
- outputStreams.clear();
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
- outputStreams));
- ASSERT_NE(0u, outputStreams.size());
+ outputStreams.clear();
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
+ outputStreams));
+ ASSERT_NE(0u, outputStreams.size());
- int32_t streamId = 0;
- for (auto &it : outputStreams) {
- Stream stream = {streamId, StreamType::OUTPUT,
- static_cast<uint32_t> (it.width),
- static_cast<uint32_t> (it.height),
- static_cast<PixelFormat> (it.format), 0, 0,
- StreamRotation::ROTATION_0};
- ::android::hardware::hidl_vec<Stream> streams = {stream};
- StreamConfiguration config = {streams,
- StreamConfigurationMode::NORMAL_MODE};
- ret = session->configureStreams(config, [streamId] (Status s,
- HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(1u, halConfig.streams.size());
- ASSERT_EQ(halConfig.streams[0].id, streamId);
- });
+ int32_t streamId = 0;
+ for (auto &it : outputStreams) {
+ Stream stream = {streamId, StreamType::OUTPUT,
+ static_cast<uint32_t> (it.width),
+ static_cast<uint32_t> (it.height),
+ static_cast<PixelFormat> (it.format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0,
+ StreamRotation::ROTATION_0};
+ ::android::hardware::hidl_vec<Stream> streams = {stream};
+ StreamConfiguration config = {streams,
+ StreamConfigurationMode::NORMAL_MODE};
+ ret = session->configureStreams(config, [streamId] (Status s,
+ HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(1u, halConfig.streams.size());
+ ASSERT_EQ(halConfig.streams[0].id, streamId);
+ });
+ ASSERT_TRUE(ret.isOk());
+ streamId++;
+ }
+
+ free_camera_metadata(staticMeta);
+ ret = session->close();
ASSERT_TRUE(ret.isOk());
- streamId++;
}
-
- free_camera_metadata(staticMeta);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
}
}
}
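The Stream initializers in these configuration tests are positional: id, stream type, width, height, pixel format, a gralloc usage hint (the new GRALLOC1_* constants replacing the previous 0), dataspace, and rotation. For reference, a single output stream built the same way would read as below; the dimensions and format are illustrative only, since a real test derives them from the device's static metadata, and the type definitions are the same V3.2 camera HIDL ones used above.

// Illustrative values only; width/height/format come from static metadata
// in the actual tests.
Stream previewStream = {0 /*id*/, StreamType::OUTPUT,
        1280u /*width*/, 720u /*height*/,
        PixelFormat::IMPLEMENTATION_DEFINED,
        GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0 /*dataSpace*/,
        StreamRotation::ROTATION_0};
::android::hardware::hidl_vec<Stream> streams = {previewStream};
StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE};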
// Check for correct handling of invalid/incorrect configuration parameters.
TEST_F(CameraHidlTest, configureStreamsInvalidOutputs) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- std::vector<AvailableStream> outputStreams;
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ std::vector<AvailableStream> outputStreams;
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- camera_metadata_t *staticMeta;
- Return<void> ret;
- sp<ICameraDeviceSession> session;
- openEmptyDeviceSession(name, env, &session /*out*/,
- &staticMeta /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ camera_metadata_t *staticMeta;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ openEmptyDeviceSession(name, provider.second, &session /*out*/,
+ &staticMeta /*out*/);
- outputStreams.clear();
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
- outputStreams));
- ASSERT_NE(0u, outputStreams.size());
+ outputStreams.clear();
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
+ outputStreams));
+ ASSERT_NE(0u, outputStreams.size());
- int32_t streamId = 0;
- Stream stream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (0),
- static_cast<uint32_t> (0),
- static_cast<PixelFormat> (outputStreams[0].format),
- 0, 0, StreamRotation::ROTATION_0};
- ::android::hardware::hidl_vec<Stream> streams = {stream};
- StreamConfiguration config = {streams,
- StreamConfigurationMode::NORMAL_MODE};
- ret = session->configureStreams(config, [] (Status s,
- HalStreamConfiguration) {
- ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
- (Status::INTERNAL_ERROR == s));
- });
- ASSERT_TRUE(ret.isOk());
+ int32_t streamId = 0;
+ Stream stream = {streamId++, StreamType::OUTPUT,
+ static_cast<uint32_t> (0),
+ static_cast<uint32_t> (0),
+ static_cast<PixelFormat> (outputStreams[0].format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0,
+ StreamRotation::ROTATION_0};
+ ::android::hardware::hidl_vec<Stream> streams = {stream};
+ StreamConfiguration config = {streams,
+ StreamConfigurationMode::NORMAL_MODE};
+ ret = session->configureStreams(config, [] (Status s,
+ HalStreamConfiguration) {
+ ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
+ (Status::INTERNAL_ERROR == s));
+ });
+ ASSERT_TRUE(ret.isOk());
- stream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (UINT32_MAX),
- static_cast<uint32_t> (UINT32_MAX),
- static_cast<PixelFormat> (outputStreams[0].format),
- 0, 0, StreamRotation::ROTATION_0};
- streams[0] = stream;
- config = {streams,
- StreamConfigurationMode::NORMAL_MODE};
- ret = session->configureStreams(config, [] (Status s,
- HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- ASSERT_TRUE(ret.isOk());
-
- for (auto &it : outputStreams) {
stream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (it.width),
- static_cast<uint32_t> (it.height),
- static_cast<PixelFormat> (UINT32_MAX),
- 0, 0, StreamRotation::ROTATION_0};
+ static_cast<uint32_t> (UINT32_MAX),
+ static_cast<uint32_t> (UINT32_MAX),
+ static_cast<PixelFormat> (outputStreams[0].format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0,
+ StreamRotation::ROTATION_0};
streams[0] = stream;
config = {streams,
StreamConfigurationMode::NORMAL_MODE};
@@ -2129,24 +2534,42 @@
});
ASSERT_TRUE(ret.isOk());
- stream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (it.width),
- static_cast<uint32_t> (it.height),
- static_cast<PixelFormat> (it.format),
- 0, 0, static_cast<StreamRotation> (UINT32_MAX)};
- streams[0] = stream;
- config = {streams,
- StreamConfigurationMode::NORMAL_MODE};
- ret = session->configureStreams(config, [] (Status s,
- HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
+ for (auto &it : outputStreams) {
+ stream = {streamId++, StreamType::OUTPUT,
+ static_cast<uint32_t> (it.width),
+ static_cast<uint32_t> (it.height),
+ static_cast<PixelFormat> (UINT32_MAX),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0,
+ StreamRotation::ROTATION_0};
+ streams[0] = stream;
+ config = {streams,
+ StreamConfigurationMode::NORMAL_MODE};
+ ret = session->configureStreams(config, [] (Status s,
+ HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ ASSERT_TRUE(ret.isOk());
+
+ stream = {streamId++, StreamType::OUTPUT,
+ static_cast<uint32_t> (it.width),
+ static_cast<uint32_t> (it.height),
+ static_cast<PixelFormat> (it.format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0,
+ static_cast<StreamRotation> (UINT32_MAX)};
+ streams[0] = stream;
+ config = {streams,
+ StreamConfigurationMode::NORMAL_MODE};
+ ret = session->configureStreams(config, [] (Status s,
+ HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ ASSERT_TRUE(ret.isOk());
+ }
+
+ free_camera_metadata(staticMeta);
+ ret = session->close();
ASSERT_TRUE(ret.isOk());
}
-
- free_camera_metadata(staticMeta);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
}
}
}
@@ -2154,83 +2577,88 @@
// Check whether all supported ZSL output stream combinations can be
// configured successfully.
TEST_F(CameraHidlTest, configureStreamsZSLInputOutputs) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- std::vector<AvailableStream> inputStreams;
- std::vector<AvailableZSLInputOutput> inputOutputMap;
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ std::vector<AvailableStream> inputStreams;
+ std::vector<AvailableZSLInputOutput> inputOutputMap;
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- camera_metadata_t *staticMeta;
- Return<void> ret;
- sp<ICameraDeviceSession> session;
- openEmptyDeviceSession(name, env, &session /*out*/,
- &staticMeta /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ camera_metadata_t *staticMeta;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ openEmptyDeviceSession(name, provider.second, &session /*out*/,
+ &staticMeta /*out*/);
- Status rc = isZSLModeAvailable(staticMeta);
- if (Status::METHOD_NOT_SUPPORTED == rc) {
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
- continue;
- }
- ASSERT_EQ(Status::OK, rc);
+ Status rc = isZSLModeAvailable(staticMeta);
+ if (Status::METHOD_NOT_SUPPORTED == rc) {
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ continue;
+ }
+ ASSERT_EQ(Status::OK, rc);
- inputStreams.clear();
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
- inputStreams));
- ASSERT_NE(0u, inputStreams.size());
-
- inputOutputMap.clear();
- ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta,
- inputOutputMap));
- ASSERT_NE(0u, inputOutputMap.size());
-
- int32_t streamId = 0;
- for (auto &inputIter : inputOutputMap) {
- AvailableStream input;
- ASSERT_EQ(Status::OK,
- findLargestSize(inputStreams, inputIter.inputFormat, input));
+ inputStreams.clear();
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
+ inputStreams));
ASSERT_NE(0u, inputStreams.size());
- AvailableStream outputThreshold = {INT32_MAX, INT32_MAX,
- inputIter.outputFormat};
- std::vector<AvailableStream> outputStreams;
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
- outputStreams, &outputThreshold));
- for (auto &outputIter : outputStreams) {
- Stream zslStream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (input.width),
- static_cast<uint32_t> (input.height),
- static_cast<PixelFormat> (input.format),
- GRALLOC_USAGE_HW_CAMERA_ZSL, 0,
- StreamRotation::ROTATION_0};
- Stream inputStream = {streamId++, StreamType::INPUT,
- static_cast<uint32_t> (input.width),
- static_cast<uint32_t> (input.height),
- static_cast<PixelFormat> (input.format), 0, 0,
- StreamRotation::ROTATION_0};
- Stream outputStream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (outputIter.width),
- static_cast<uint32_t> (outputIter.height),
- static_cast<PixelFormat> (outputIter.format), 0, 0,
- StreamRotation::ROTATION_0};
+ inputOutputMap.clear();
+ ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta,
+ inputOutputMap));
+ ASSERT_NE(0u, inputOutputMap.size());
- ::android::hardware::hidl_vec<Stream> streams = {
- inputStream, zslStream, outputStream};
- StreamConfiguration config = {streams,
- StreamConfigurationMode::NORMAL_MODE};
- ret = session->configureStreams(config, [streamId] (Status s,
- HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(3u, halConfig.streams.size());
- });
- ASSERT_TRUE(ret.isOk());
+ int32_t streamId = 0;
+ for (auto &inputIter : inputOutputMap) {
+ AvailableStream input;
+ ASSERT_EQ(Status::OK,
+ findLargestSize(inputStreams, inputIter.inputFormat, input));
+ ASSERT_NE(0u, inputStreams.size());
+
+ AvailableStream outputThreshold = {INT32_MAX, INT32_MAX,
+ inputIter.outputFormat};
+ std::vector<AvailableStream> outputStreams;
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
+ outputStreams, &outputThreshold));
+ for (auto &outputIter : outputStreams) {
+ Stream zslStream = {streamId++, StreamType::OUTPUT,
+ static_cast<uint32_t> (input.width),
+ static_cast<uint32_t> (input.height),
+ static_cast<PixelFormat> (input.format),
+ GRALLOC_USAGE_HW_CAMERA_ZSL, 0,
+ StreamRotation::ROTATION_0};
+ Stream inputStream = {streamId++, StreamType::INPUT,
+ static_cast<uint32_t> (input.width),
+ static_cast<uint32_t> (input.height),
+ static_cast<PixelFormat> (input.format),
+ 0, 0,
+ StreamRotation::ROTATION_0};
+ Stream outputStream = {streamId++, StreamType::OUTPUT,
+ static_cast<uint32_t> (outputIter.width),
+ static_cast<uint32_t> (outputIter.height),
+ static_cast<PixelFormat> (outputIter.format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0,
+ StreamRotation::ROTATION_0};
+
+ ::android::hardware::hidl_vec<Stream> streams = {
+ inputStream, zslStream, outputStream};
+ StreamConfiguration config = {streams,
+ StreamConfigurationMode::NORMAL_MODE};
+ ret = session->configureStreams(config, [streamId] (Status s,
+ HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(3u, halConfig.streams.size());
+ });
+ ASSERT_TRUE(ret.isOk());
+ }
}
- }
- free_camera_metadata(staticMeta);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
+ free_camera_metadata(staticMeta);
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
@@ -2238,62 +2666,67 @@
// Verify that all supported preview + still capture stream combinations
// can be configured successfully.
TEST_F(CameraHidlTest, configureStreamsPreviewStillOutputs) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- std::vector<AvailableStream> outputBlobStreams;
- std::vector<AvailableStream> outputPreviewStreams;
- AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
- static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
- AvailableStream blobThreshold = {INT32_MAX, INT32_MAX,
- static_cast<int32_t>(PixelFormat::BLOB)};
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ std::vector<AvailableStream> outputBlobStreams;
+ std::vector<AvailableStream> outputPreviewStreams;
+ AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
+ static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
+ AvailableStream blobThreshold = {INT32_MAX, INT32_MAX,
+ static_cast<int32_t>(PixelFormat::BLOB)};
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- camera_metadata_t *staticMeta;
- Return<void> ret;
- sp<ICameraDeviceSession> session;
- openEmptyDeviceSession(name, env, &session /*out*/,
- &staticMeta /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ camera_metadata_t *staticMeta;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ openEmptyDeviceSession(name, provider.second, &session /*out*/,
+ &staticMeta /*out*/);
- outputBlobStreams.clear();
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
- outputBlobStreams, &blobThreshold));
- ASSERT_NE(0u, outputBlobStreams.size());
+ outputBlobStreams.clear();
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
+ outputBlobStreams, &blobThreshold));
+ ASSERT_NE(0u, outputBlobStreams.size());
- outputPreviewStreams.clear();
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
- outputPreviewStreams, &previewThreshold));
- ASSERT_NE(0u, outputPreviewStreams.size());
+ outputPreviewStreams.clear();
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
+ outputPreviewStreams, &previewThreshold));
+ ASSERT_NE(0u, outputPreviewStreams.size());
- int32_t streamId = 0;
- for (auto &blobIter : outputBlobStreams) {
- for (auto &previewIter : outputPreviewStreams) {
- Stream previewStream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (previewIter.width),
- static_cast<uint32_t> (previewIter.height),
- static_cast<PixelFormat> (previewIter.format), 0, 0,
- StreamRotation::ROTATION_0};
- Stream blobStream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (blobIter.width),
- static_cast<uint32_t> (blobIter.height),
- static_cast<PixelFormat> (blobIter.format), 0, 0,
- StreamRotation::ROTATION_0};
- ::android::hardware::hidl_vec<Stream> streams = {
- previewStream, blobStream};
- StreamConfiguration config = {streams,
- StreamConfigurationMode::NORMAL_MODE};
- ret = session->configureStreams(config, [streamId] (Status s,
- HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(2u, halConfig.streams.size());
- });
- ASSERT_TRUE(ret.isOk());
+ int32_t streamId = 0;
+ for (auto &blobIter : outputBlobStreams) {
+ for (auto &previewIter : outputPreviewStreams) {
+ Stream previewStream = {streamId++, StreamType::OUTPUT,
+ static_cast<uint32_t> (previewIter.width),
+ static_cast<uint32_t> (previewIter.height),
+ static_cast<PixelFormat> (previewIter.format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0,
+ StreamRotation::ROTATION_0};
+ Stream blobStream = {streamId++, StreamType::OUTPUT,
+ static_cast<uint32_t> (blobIter.width),
+ static_cast<uint32_t> (blobIter.height),
+ static_cast<PixelFormat> (blobIter.format),
+ GRALLOC1_CONSUMER_USAGE_CPU_READ, 0,
+ StreamRotation::ROTATION_0};
+ ::android::hardware::hidl_vec<Stream> streams = {
+ previewStream, blobStream};
+ StreamConfiguration config = {streams,
+ StreamConfigurationMode::NORMAL_MODE};
+ ret = session->configureStreams(config, [streamId] (Status s,
+ HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(2u, halConfig.streams.size());
+ });
+ ASSERT_TRUE(ret.isOk());
+ }
}
- }
- free_camera_metadata(staticMeta);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
+ free_camera_metadata(staticMeta);
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
@@ -2302,92 +2735,99 @@
// configured. Additionally check for common invalid inputs when
// using this mode.
TEST_F(CameraHidlTest, configureStreamsConstrainedOutputs) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- camera_metadata_t *staticMeta;
- Return<void> ret;
- sp<ICameraDeviceSession> session;
- openEmptyDeviceSession(name, env, &session /*out*/,
- &staticMeta /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ camera_metadata_t *staticMeta;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ openEmptyDeviceSession(name, provider.second, &session /*out*/,
+ &staticMeta /*out*/);
- Status rc = isConstrainedModeAvailable(staticMeta);
- if (Status::METHOD_NOT_SUPPORTED == rc) {
+ Status rc = isConstrainedModeAvailable(staticMeta);
+ if (Status::METHOD_NOT_SUPPORTED == rc) {
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ continue;
+ }
+ ASSERT_EQ(Status::OK, rc);
+
+ AvailableStream hfrStream;
+ rc = pickConstrainedModeSize(staticMeta, hfrStream);
+ ASSERT_EQ(Status::OK, rc);
+
+ int32_t streamId = 0;
+ Stream stream = {streamId, StreamType::OUTPUT,
+ static_cast<uint32_t> (hfrStream.width),
+ static_cast<uint32_t> (hfrStream.height),
+ static_cast<PixelFormat> (hfrStream.format),
+ GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, 0,
+ StreamRotation::ROTATION_0};
+ ::android::hardware::hidl_vec<Stream> streams = {stream};
+ StreamConfiguration config = {streams,
+ StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
+ ret = session->configureStreams(config, [streamId] (Status s,
+ HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(1u, halConfig.streams.size());
+ ASSERT_EQ(halConfig.streams[0].id, streamId);
+ });
+ ASSERT_TRUE(ret.isOk());
+
+ stream = {streamId++, StreamType::OUTPUT,
+ static_cast<uint32_t> (0),
+ static_cast<uint32_t> (0),
+ static_cast<PixelFormat> (hfrStream.format),
+ GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, 0,
+ StreamRotation::ROTATION_0};
+ streams[0] = stream;
+ config = {streams,
+ StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
+ ret = session->configureStreams(config, [streamId] (Status s,
+ HalStreamConfiguration) {
+ ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
+ (Status::INTERNAL_ERROR == s));
+ });
+ ASSERT_TRUE(ret.isOk());
+
+ stream = {streamId++, StreamType::OUTPUT,
+ static_cast<uint32_t> (UINT32_MAX),
+ static_cast<uint32_t> (UINT32_MAX),
+ static_cast<PixelFormat> (hfrStream.format),
+ GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, 0,
+ StreamRotation::ROTATION_0};
+ streams[0] = stream;
+ config = {streams,
+ StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
+ ret = session->configureStreams(config, [streamId] (Status s,
+ HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ ASSERT_TRUE(ret.isOk());
+
+ stream = {streamId++, StreamType::OUTPUT,
+ static_cast<uint32_t> (hfrStream.width),
+ static_cast<uint32_t> (hfrStream.height),
+ static_cast<PixelFormat> (UINT32_MAX),
+ GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, 0,
+ StreamRotation::ROTATION_0};
+ streams[0] = stream;
+ config = {streams,
+ StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
+ ret = session->configureStreams(config, [streamId] (Status s,
+ HalStreamConfiguration) {
+ ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
+ });
+ ASSERT_TRUE(ret.isOk());
+
+ free_camera_metadata(staticMeta);
ret = session->close();
ASSERT_TRUE(ret.isOk());
- continue;
}
- ASSERT_EQ(Status::OK, rc);
-
- AvailableStream hfrStream;
- rc = pickConstrainedModeSize(staticMeta, hfrStream);
- ASSERT_EQ(Status::OK, rc);
-
- int32_t streamId = 0;
- Stream stream = {streamId, StreamType::OUTPUT,
- static_cast<uint32_t> (hfrStream.width),
- static_cast<uint32_t> (hfrStream.height),
- static_cast<PixelFormat> (hfrStream.format), 0, 0,
- StreamRotation::ROTATION_0};
- ::android::hardware::hidl_vec<Stream> streams = {stream};
- StreamConfiguration config = {streams,
- StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
- ret = session->configureStreams(config, [streamId] (Status s,
- HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(1u, halConfig.streams.size());
- ASSERT_EQ(halConfig.streams[0].id, streamId);
- });
- ASSERT_TRUE(ret.isOk());
-
- stream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (0),
- static_cast<uint32_t> (0),
- static_cast<PixelFormat> (hfrStream.format), 0, 0,
- StreamRotation::ROTATION_0};
- streams[0] = stream;
- config = {streams,
- StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
- ret = session->configureStreams(config, [streamId] (Status s,
- HalStreamConfiguration) {
- ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) ||
- (Status::INTERNAL_ERROR == s));
- });
- ASSERT_TRUE(ret.isOk());
-
- stream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (UINT32_MAX),
- static_cast<uint32_t> (UINT32_MAX),
- static_cast<PixelFormat> (hfrStream.format), 0, 0,
- StreamRotation::ROTATION_0};
- streams[0] = stream;
- config = {streams,
- StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
- ret = session->configureStreams(config, [streamId] (Status s,
- HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- ASSERT_TRUE(ret.isOk());
-
- stream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (hfrStream.width),
- static_cast<uint32_t> (hfrStream.height),
- static_cast<PixelFormat> (UINT32_MAX), 0, 0,
- StreamRotation::ROTATION_0};
- streams[0] = stream;
- config = {streams,
- StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE};
- ret = session->configureStreams(config, [streamId] (Status s,
- HalStreamConfiguration) {
- ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s);
- });
- ASSERT_TRUE(ret.isOk());
-
- free_camera_metadata(staticMeta);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
}
}
}
@@ -2395,176 +2835,218 @@
// Verify that all supported video + snapshot stream combinations can
// be configured successfully.
TEST_F(CameraHidlTest, configureStreamsVideoStillOutputs) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- std::vector<AvailableStream> outputBlobStreams;
- std::vector<AvailableStream> outputVideoStreams;
- AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
- static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
- AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
- static_cast<int32_t>(PixelFormat::BLOB)};
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ std::vector<AvailableStream> outputBlobStreams;
+ std::vector<AvailableStream> outputVideoStreams;
+ AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight,
+ static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
+ AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight,
+ static_cast<int32_t>(PixelFormat::BLOB)};
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- camera_metadata_t *staticMeta;
- Return<void> ret;
- sp<ICameraDeviceSession> session;
- openEmptyDeviceSession(name, env, &session /*out*/,
- &staticMeta /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ camera_metadata_t *staticMeta;
+ Return<void> ret;
+ sp<ICameraDeviceSession> session;
+ openEmptyDeviceSession(name, provider.second, &session /*out*/,
+ &staticMeta /*out*/);
- outputBlobStreams.clear();
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
- outputBlobStreams, &blobThreshold));
- ASSERT_NE(0u, outputBlobStreams.size());
+ outputBlobStreams.clear();
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
+ outputBlobStreams, &blobThreshold));
+ ASSERT_NE(0u, outputBlobStreams.size());
- outputVideoStreams.clear();
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
- outputVideoStreams, &videoThreshold));
- ASSERT_NE(0u, outputVideoStreams.size());
+ outputVideoStreams.clear();
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta,
+ outputVideoStreams, &videoThreshold));
+ ASSERT_NE(0u, outputVideoStreams.size());
- int32_t streamId = 0;
- for (auto &blobIter : outputBlobStreams) {
- for (auto &videoIter : outputVideoStreams) {
- Stream videoStream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (videoIter.width),
- static_cast<uint32_t> (videoIter.height),
- static_cast<PixelFormat> (videoIter.format), 0, 0,
- StreamRotation::ROTATION_0};
- Stream blobStream = {streamId++, StreamType::OUTPUT,
- static_cast<uint32_t> (blobIter.width),
- static_cast<uint32_t> (blobIter.height),
- static_cast<PixelFormat> (blobIter.format),
- GRALLOC_USAGE_HW_VIDEO_ENCODER, 0,
- StreamRotation::ROTATION_0};
- ::android::hardware::hidl_vec<Stream> streams = {
- videoStream, blobStream};
- StreamConfiguration config = {streams,
- StreamConfigurationMode::NORMAL_MODE};
- ret = session->configureStreams(config, [streamId] (Status s,
- HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(2u, halConfig.streams.size());
- });
- ASSERT_TRUE(ret.isOk());
+ int32_t streamId = 0;
+ for (auto &blobIter : outputBlobStreams) {
+ for (auto &videoIter : outputVideoStreams) {
+ Stream videoStream = {streamId++, StreamType::OUTPUT,
+ static_cast<uint32_t> (videoIter.width),
+ static_cast<uint32_t> (videoIter.height),
+ static_cast<PixelFormat> (videoIter.format),
+ GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, 0,
+ StreamRotation::ROTATION_0};
+ Stream blobStream = {streamId++, StreamType::OUTPUT,
+ static_cast<uint32_t> (blobIter.width),
+ static_cast<uint32_t> (blobIter.height),
+ static_cast<PixelFormat> (blobIter.format),
+ GRALLOC1_CONSUMER_USAGE_CPU_READ, 0,
+ StreamRotation::ROTATION_0};
+ ::android::hardware::hidl_vec<Stream> streams = {
+ videoStream, blobStream};
+ StreamConfiguration config = {streams,
+ StreamConfigurationMode::NORMAL_MODE};
+ ret = session->configureStreams(config, [streamId] (
+ Status s, HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(2u, halConfig.streams.size());
+ });
+ ASSERT_TRUE(ret.isOk());
+ }
}
- }
- free_camera_metadata(staticMeta);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
+ free_camera_metadata(staticMeta);
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
// Generate and verify a camera capture request
TEST_F(CameraHidlTest, processCaptureRequestPreview) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
- static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
- uint64_t bufferId = 1;
- uint32_t frameNumber = 1;
- ::android::hardware::hidl_vec<uint8_t> settings;
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
+ static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
+ uint64_t bufferId = 1;
+ uint32_t frameNumber = 1;
+ ::android::hardware::hidl_vec<uint8_t> settings;
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- Stream previewStream;
- HalStreamConfiguration halStreamConfig;
- sp<ICameraDeviceSession> session;
- configurePreviewStream(name, env, &previewThreshold,
- &session /*out*/, &previewStream /*out*/,
- &halStreamConfig /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ Stream previewStream;
+ HalStreamConfiguration halStreamConfig;
+ sp<ICameraDeviceSession> session;
+ bool supportsPartialResults = false;
+ uint32_t partialResultCount = 0;
+ configurePreviewStream(name, provider.second, &previewThreshold,
+ &session /*out*/, &previewStream /*out*/,
+ &halStreamConfig /*out*/, &supportsPartialResults /*out*/,
+ &partialResultCount/*out*/);
- RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
- Return<void> ret;
- ret = session->constructDefaultRequestSettings(reqTemplate,
- [&](auto status, const auto& req) {
- ASSERT_EQ(Status::OK, status);
- settings = req; });
- ASSERT_TRUE(ret.isOk());
-
- sp<GraphicBuffer> gb = new GraphicBuffer(
- previewStream.width, previewStream.height,
- static_cast<int32_t>(halStreamConfig.streams[0].overrideFormat),
- 1, android_convertGralloc1To0Usage(
- halStreamConfig.streams[0].producerUsage,
- halStreamConfig.streams[0].consumerUsage));
- ASSERT_NE(nullptr, gb.get());
- StreamBuffer outputBuffer = {halStreamConfig.streams[0].id,
- bufferId, hidl_handle(gb->getNativeBuffer()->handle),
- BufferStatus::OK, nullptr, nullptr};
- ::android::hardware::hidl_vec<StreamBuffer> outputBuffers = {
- outputBuffer};
- StreamBuffer emptyInputBuffer = {-1, 0, nullptr,
- BufferStatus::ERROR, nullptr, nullptr};
- CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings,
- emptyInputBuffer, outputBuffers};
-
- {
- std::unique_lock<std::mutex> l(mLock);
- mResultBuffers.clear();
- mResultFrameNumber = frameNumber;
- }
-
- Status status = Status::INTERNAL_ERROR;
- uint32_t numRequestProcessed = 0;
- hidl_vec<BufferCache> cachesToRemove;
- Return<void> returnStatus = session->processCaptureRequest(
- {request},
- cachesToRemove,
- [&status, &numRequestProcessed] (auto s, uint32_t n) {
- status = s;
- numRequestProcessed = n;
+ std::shared_ptr<ResultMetadataQueue> resultQueue;
+ auto resultQueueRet = session->getCaptureResultMetadataQueue(
+ [&resultQueue](const auto& descriptor) {
+ resultQueue = std::make_shared<ResultMetadataQueue>(
+ descriptor);
+ if (!resultQueue->isValid() ||
+ resultQueue->availableToWrite() <= 0) {
+ ALOGE("%s: HAL returns empty result metadata fmq,"
+ " not use it", __func__);
+ resultQueue = nullptr;
+ // Don't use the queue onwards.
+ }
});
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, status);
- ASSERT_EQ(numRequestProcessed, 1u);
+ ASSERT_TRUE(resultQueueRet.isOk());
- {
- std::unique_lock<std::mutex> l(mLock);
- while (0 == mResultBuffers.size()) {
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::seconds(kStreamBufferTimeoutSec);
- ASSERT_NE(std::cv_status::timeout,
- mResultCondition.wait_until(l, timeout));
+ InFlightRequest inflightReq = {1, false, supportsPartialResults,
+ partialResultCount, resultQueue};
+
+ RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
+ Return<void> ret;
+ ret = session->constructDefaultRequestSettings(reqTemplate,
+ [&](auto status, const auto& req) {
+ ASSERT_EQ(Status::OK, status);
+ settings = req; });
+ ASSERT_TRUE(ret.isOk());
+
+ sp<GraphicBuffer> gb = new GraphicBuffer(
+ previewStream.width, previewStream.height,
+ static_cast<int32_t>(halStreamConfig.streams[0].overrideFormat),
+ 1, android_convertGralloc1To0Usage(
+ halStreamConfig.streams[0].producerUsage,
+ halStreamConfig.streams[0].consumerUsage));
+ ASSERT_NE(nullptr, gb.get());
+ StreamBuffer outputBuffer = {halStreamConfig.streams[0].id,
+ bufferId, hidl_handle(gb->getNativeBuffer()->handle),
+ BufferStatus::OK, nullptr, nullptr};
+ ::android::hardware::hidl_vec<StreamBuffer> outputBuffers = {
+ outputBuffer};
+ StreamBuffer emptyInputBuffer = {-1, 0, nullptr,
+ BufferStatus::ERROR, nullptr, nullptr};
+ CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */,
+ settings, emptyInputBuffer, outputBuffers};
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ mInflightMap.clear();
+ mInflightMap.add(frameNumber, &inflightReq);
}
- ASSERT_EQ(BufferStatus::OK, mResultBuffers[0].status);
- ASSERT_EQ(previewStream.id, mResultBuffers[0].streamId);
+ Status status = Status::INTERNAL_ERROR;
+ uint32_t numRequestProcessed = 0;
+ hidl_vec<BufferCache> cachesToRemove;
+ Return<void> returnStatus = session->processCaptureRequest(
+ {request},
+ cachesToRemove,
+ [&status, &numRequestProcessed] (auto s, uint32_t n) {
+ status = s;
+ numRequestProcessed = n;
+ });
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_EQ(numRequestProcessed, 1u);
- request.frameNumber++;
- //Empty settings should be supported after the first call
- //for repeating requests.
- request.settings.setToExternal(nullptr, 0, true);
- mResultBuffers.clear();
- mResultFrameNumber++;
- }
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ while (!inflightReq.errorCodeValid &&
+ ((0 < inflightReq.numBuffersLeft) ||
+ (!inflightReq.haveResultMetadata))) {
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::seconds(kStreamBufferTimeoutSec);
+ ASSERT_NE(std::cv_status::timeout,
+ mResultCondition.wait_until(l, timeout));
+ }
- returnStatus = session->processCaptureRequest(
- {request},
- cachesToRemove,
- [&status, &numRequestProcessed] (auto s, uint32_t n) {
- status = s;
- numRequestProcessed = n;
- });
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, status);
- ASSERT_EQ(numRequestProcessed, 1u);
+ ASSERT_FALSE(inflightReq.errorCodeValid);
+ ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u);
+ ASSERT_EQ(previewStream.id,
+ inflightReq.resultOutputBuffers[0].streamId);
- {
- std::unique_lock<std::mutex> l(mLock);
- while (0 == mResultBuffers.size()) {
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::seconds(kStreamBufferTimeoutSec);
- ASSERT_NE(std::cv_status::timeout,
- mResultCondition.wait_until(l, timeout));
+ request.frameNumber++;
+ // Empty settings should be supported after the first call
+ // for repeating requests.
+ request.settings.setToExternal(nullptr, 0, true);
+ // The buffer has already been registered with the HAL via bufferId,
+ // so per the API contract we should send a null handle for it here.
+ request.outputBuffers[0].buffer = nullptr;
+ mInflightMap.clear();
+ inflightReq = {1, false, supportsPartialResults,
+ partialResultCount, resultQueue};
+ mInflightMap.add(request.frameNumber, &inflightReq);
}
- ASSERT_EQ(BufferStatus::OK, mResultBuffers[0].status);
- ASSERT_EQ(previewStream.id, mResultBuffers[0].streamId);
- }
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
+ returnStatus = session->processCaptureRequest(
+ {request},
+ cachesToRemove,
+ [&status, &numRequestProcessed] (auto s, uint32_t n) {
+ status = s;
+ numRequestProcessed = n;
+ });
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_EQ(numRequestProcessed, 1u);
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ while (!inflightReq.errorCodeValid &&
+ ((0 < inflightReq.numBuffersLeft) ||
+ (!inflightReq.haveResultMetadata))) {
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::seconds(kStreamBufferTimeoutSec);
+ ASSERT_NE(std::cv_status::timeout,
+ mResultCondition.wait_until(l, timeout));
+ }
+
+ ASSERT_FALSE(inflightReq.errorCodeValid);
+ ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u);
+ ASSERT_EQ(previewStream.id,
+ inflightReq.resultOutputBuffers[0].streamId);
+ }
+
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
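The capture-result wait introduced above blocks until the tracked in-flight request either reports an error notification or has delivered all of its output buffers plus the final result metadata, again bounded by kStreamBufferTimeoutSec. Stripped of the HIDL plumbing, the predicate and bounded wait reduce to the sketch below; InFlight is a simplified stand-in for the test's InFlightRequest, which additionally carries the partial result count and the result metadata queue.

#include <chrono>
#include <condition_variable>
#include <mutex>

// Simplified stand-in for the InFlightRequest bookkeeping used by the tests.
struct InFlight {
    int numBuffersLeft;
    bool haveResultMetadata;
    bool errorCodeValid;
};

// Returns false on timeout, true once the request has completed or failed.
bool waitForRequest(std::mutex& lock, std::condition_variable& cond,
                    const InFlight& req, std::chrono::seconds timeout) {
    std::unique_lock<std::mutex> l(lock);
    return cond.wait_for(l, timeout, [&] {
        return req.errorCodeValid ||
               (req.numBuffersLeft == 0 && req.haveResultMetadata);
    });
}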
@@ -2572,58 +3054,67 @@
// Test whether an incorrect capture request with missing settings will
// be reported correctly.
TEST_F(CameraHidlTest, processCaptureRequestInvalidSinglePreview) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- std::vector<AvailableStream> outputPreviewStreams;
- AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
- static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
- uint64_t bufferId = 1;
- uint32_t frameNumber = 1;
- ::android::hardware::hidl_vec<uint8_t> settings;
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ std::vector<AvailableStream> outputPreviewStreams;
+ AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
+ static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
+ uint64_t bufferId = 1;
+ uint32_t frameNumber = 1;
+ ::android::hardware::hidl_vec<uint8_t> settings;
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- Stream previewStream;
- HalStreamConfiguration halStreamConfig;
- sp<ICameraDeviceSession> session;
- configurePreviewStream(name, env, &previewThreshold,
- &session /*out*/, &previewStream /*out*/,
- &halStreamConfig /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ Stream previewStream;
+ HalStreamConfiguration halStreamConfig;
+ sp<ICameraDeviceSession> session;
+ bool supportsPartialResults = false;
+ uint32_t partialResultCount = 0;
+ configurePreviewStream(name, provider.second, &previewThreshold,
+ &session /*out*/, &previewStream /*out*/,
+ &halStreamConfig /*out*/, &supportsPartialResults /*out*/,
+ &partialResultCount /*out*/);
- sp<GraphicBuffer> gb = new GraphicBuffer(
- previewStream.width, previewStream.height,
- static_cast<int32_t>(halStreamConfig.streams[0].overrideFormat),
- 1, android_convertGralloc1To0Usage(
- halStreamConfig.streams[0].producerUsage,
- halStreamConfig.streams[0].consumerUsage));
+ sp<GraphicBuffer> gb = new GraphicBuffer(
+ previewStream.width, previewStream.height,
+ static_cast<int32_t>(halStreamConfig.streams[0].overrideFormat),
+ 1, android_convertGralloc1To0Usage(
+ halStreamConfig.streams[0].producerUsage,
+ halStreamConfig.streams[0].consumerUsage));
- StreamBuffer outputBuffer = {halStreamConfig.streams[0].id,
- bufferId, hidl_handle(gb->getNativeBuffer()->handle),
- BufferStatus::OK, nullptr, nullptr};
- ::android::hardware::hidl_vec<StreamBuffer> outputBuffers = {
- outputBuffer};
- StreamBuffer emptyInputBuffer = {-1, 0, nullptr,
- BufferStatus::ERROR, nullptr, nullptr};
- CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings,
- emptyInputBuffer, outputBuffers};
+ StreamBuffer outputBuffer = {halStreamConfig.streams[0].id,
+ bufferId, hidl_handle(gb->getNativeBuffer()->handle),
+ BufferStatus::OK, nullptr, nullptr};
+ ::android::hardware::hidl_vec<StreamBuffer> outputBuffers = {
+ outputBuffer};
+ StreamBuffer emptyInputBuffer = {-1, 0, nullptr,
+ BufferStatus::ERROR, nullptr, nullptr};
+ CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings,
+ emptyInputBuffer, outputBuffers};
- //Settings were not correctly initialized, we should fail here
- Status status = Status::OK;
- uint32_t numRequestProcessed = 0;
- hidl_vec<BufferCache> cachesToRemove;
- Return<void> ret = session->processCaptureRequest(
- {request},
- cachesToRemove,
- [&status, &numRequestProcessed] (auto s, uint32_t n) {
- status = s;
- numRequestProcessed = n;
- });
- ASSERT_TRUE(ret.isOk());
- ASSERT_EQ(Status::INTERNAL_ERROR, status);
- ASSERT_EQ(numRequestProcessed, 0u);
+ // Settings were not correctly initialized; this request is expected to fail.
+ Status status = Status::OK;
+ uint32_t numRequestProcessed = 0;
+ hidl_vec<BufferCache> cachesToRemove;
+ Return<void> ret = session->processCaptureRequest(
+ {request},
+ cachesToRemove,
+ [&status, &numRequestProcessed] (auto s, uint32_t n) {
+ status = s;
+ numRequestProcessed = n;
+ });
+ ASSERT_TRUE(ret.isOk());
+ // b/64041692: Temporarily accept ILLEGAL_ARGUMENT or INTERNAL_ERROR.
+ // This will be narrowed to ILLEGAL_ARGUMENT only in the next release.
+ ASSERT_TRUE(status == Status::ILLEGAL_ARGUMENT ||
+ status == Status::INTERNAL_ERROR);
+ ASSERT_EQ(numRequestProcessed, 0u);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
@@ -2631,207 +3122,233 @@
// Check whether an invalid capture request with missing output buffers
// will be reported correctly.
TEST_F(CameraHidlTest, processCaptureRequestInvalidBuffer) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- std::vector<AvailableStream> outputBlobStreams;
- AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
- static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
- uint32_t frameNumber = 1;
- ::android::hardware::hidl_vec<uint8_t> settings;
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ std::vector<AvailableStream> outputBlobStreams;
+ AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
+ static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
+ uint32_t frameNumber = 1;
+ ::android::hardware::hidl_vec<uint8_t> settings;
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- Stream previewStream;
- HalStreamConfiguration halStreamConfig;
- sp<ICameraDeviceSession> session;
- configurePreviewStream(name, env, &previewThreshold,
- &session /*out*/, &previewStream /*out*/,
- &halStreamConfig /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ Stream previewStream;
+ HalStreamConfiguration halStreamConfig;
+ sp<ICameraDeviceSession> session;
+ bool supportsPartialResults = false;
+ uint32_t partialResultCount = 0;
+ configurePreviewStream(name, provider.second, &previewThreshold,
+ &session /*out*/, &previewStream /*out*/,
+ &halStreamConfig /*out*/, &supportsPartialResults/*out*/,
+ &partialResultCount /*out*/);
- RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
- Return<void> ret;
- ret = session->constructDefaultRequestSettings(reqTemplate,
- [&](auto status, const auto& req) {
- ASSERT_EQ(Status::OK, status);
- settings = req; });
- ASSERT_TRUE(ret.isOk());
+ RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
+ Return<void> ret;
+ ret = session->constructDefaultRequestSettings(reqTemplate,
+ [&](auto status, const auto& req) {
+ ASSERT_EQ(Status::OK, status);
+ settings = req; });
+ ASSERT_TRUE(ret.isOk());
- ::android::hardware::hidl_vec<StreamBuffer> emptyOutputBuffers;
- StreamBuffer emptyInputBuffer = {-1, 0, nullptr,
- BufferStatus::ERROR, nullptr, nullptr};
- CaptureRequest request = {frameNumber, 0/* fmqSettingsSize */, settings,
- emptyInputBuffer, emptyOutputBuffers};
+ ::android::hardware::hidl_vec<StreamBuffer> emptyOutputBuffers;
+ StreamBuffer emptyInputBuffer = {-1, 0, nullptr,
+ BufferStatus::ERROR, nullptr, nullptr};
+ CaptureRequest request = {frameNumber, 0/* fmqSettingsSize */,
+ settings, emptyInputBuffer, emptyOutputBuffers};
- //Output buffers are missing, we should fail here
- Status status = Status::OK;
- uint32_t numRequestProcessed = 0;
- hidl_vec<BufferCache> cachesToRemove;
- ret = session->processCaptureRequest(
- {request},
- cachesToRemove,
- [&status, &numRequestProcessed] (auto s, uint32_t n) {
- status = s;
- numRequestProcessed = n;
- });
- ASSERT_TRUE(ret.isOk());
- ASSERT_EQ(Status::INTERNAL_ERROR, status);
- ASSERT_EQ(numRequestProcessed, 0u);
+ //Output buffers are missing, we should fail here
+ Status status = Status::OK;
+ uint32_t numRequestProcessed = 0;
+ hidl_vec<BufferCache> cachesToRemove;
+ ret = session->processCaptureRequest(
+ {request},
+ cachesToRemove,
+ [&status, &numRequestProcessed] (auto s, uint32_t n) {
+ status = s;
+ numRequestProcessed = n;
+ });
+ ASSERT_TRUE(ret.isOk());
+            // b/64041692: Temporarily accept ILLEGAL_ARGUMENT or INTERNAL_ERROR
+            // It will be changed to only accept ILLEGAL_ARGUMENT in the next release
+ ASSERT_TRUE(status == Status::ILLEGAL_ARGUMENT ||
+ status == Status::INTERNAL_ERROR);
+ ASSERT_EQ(numRequestProcessed, 0u);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
+ }
}
}
}
// Generate, trigger and flush a preview request
TEST_F(CameraHidlTest, flushPreviewRequest) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- std::vector<AvailableStream> outputPreviewStreams;
- AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
- static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
- uint64_t bufferId = 1;
- uint32_t frameNumber = 1;
- ::android::hardware::hidl_vec<uint8_t> settings;
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ std::vector<AvailableStream> outputPreviewStreams;
+ AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
+ static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
+ uint64_t bufferId = 1;
+ uint32_t frameNumber = 1;
+ ::android::hardware::hidl_vec<uint8_t> settings;
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- Stream previewStream;
- HalStreamConfiguration halStreamConfig;
- sp<ICameraDeviceSession> session;
- configurePreviewStream(name, env, &previewThreshold,
- &session /*out*/, &previewStream /*out*/,
- &halStreamConfig /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ Stream previewStream;
+ HalStreamConfiguration halStreamConfig;
+ sp<ICameraDeviceSession> session;
+ bool supportsPartialResults = false;
+ uint32_t partialResultCount = 0;
+ configurePreviewStream(name, provider.second, &previewThreshold,
+ &session /*out*/, &previewStream /*out*/,
+ &halStreamConfig /*out*/, &supportsPartialResults /*out*/,
+ &partialResultCount /*out*/);
- RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
- Return<void> ret;
- ret = session->constructDefaultRequestSettings(reqTemplate,
- [&](auto status, const auto& req) {
- ASSERT_EQ(Status::OK, status);
- settings = req; });
- ASSERT_TRUE(ret.isOk());
-
- sp<GraphicBuffer> gb = new GraphicBuffer(
- previewStream.width, previewStream.height,
- static_cast<int32_t>(halStreamConfig.streams[0].overrideFormat),
- 1, android_convertGralloc1To0Usage(
- halStreamConfig.streams[0].producerUsage,
- halStreamConfig.streams[0].consumerUsage));
- ASSERT_NE(nullptr, gb.get());
- StreamBuffer outputBuffer = {halStreamConfig.streams[0].id,
- bufferId, hidl_handle(gb->getNativeBuffer()->handle),
- BufferStatus::OK, nullptr, nullptr};
- ::android::hardware::hidl_vec<StreamBuffer> outputBuffers = {
- outputBuffer};
- const StreamBuffer emptyInputBuffer = {-1, 0, nullptr,
- BufferStatus::ERROR, nullptr, nullptr};
- CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings,
- emptyInputBuffer, outputBuffers};
-
- {
- std::unique_lock<std::mutex> l(mLock);
- mResultBuffers.clear();
- mErrors.clear();
- mResultFrameNumber = frameNumber;
- }
-
- Status status = Status::INTERNAL_ERROR;
- uint32_t numRequestProcessed = 0;
- hidl_vec<BufferCache> cachesToRemove;
- ret = session->processCaptureRequest(
- {request},
- cachesToRemove,
- [&status, &numRequestProcessed] (auto s, uint32_t n) {
- status = s;
- numRequestProcessed = n;
+ std::shared_ptr<ResultMetadataQueue> resultQueue;
+ auto resultQueueRet = session->getCaptureResultMetadataQueue(
+ [&resultQueue](const auto& descriptor) {
+ resultQueue = std::make_shared<ResultMetadataQueue>(
+ descriptor);
+ if (!resultQueue->isValid() ||
+ resultQueue->availableToWrite() <= 0) {
+ ALOGE("%s: HAL returns empty result metadata fmq,"
+ " not use it", __func__);
+ resultQueue = nullptr;
+ // Don't use the queue onwards.
+ }
});
+ ASSERT_TRUE(resultQueueRet.isOk());
- ASSERT_TRUE(ret.isOk());
- ASSERT_EQ(Status::OK, status);
- ASSERT_EQ(numRequestProcessed, 1u);
- //Flush before waiting for request to complete.
- Return<Status> returnStatus = session->flush();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
+ InFlightRequest inflightReq = {1, false, supportsPartialResults,
+ partialResultCount, resultQueue};
+ RequestTemplate reqTemplate = RequestTemplate::PREVIEW;
+ Return<void> ret;
+ ret = session->constructDefaultRequestSettings(reqTemplate,
+ [&](auto status, const auto& req) {
+ ASSERT_EQ(Status::OK, status);
+ settings = req; });
+ ASSERT_TRUE(ret.isOk());
- {
- std::unique_lock<std::mutex> l(mLock);
- while ((0 == mResultBuffers.size()) && (0 == mErrors.size())) {
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::seconds(kStreamBufferTimeoutSec);
- ASSERT_NE(std::cv_status::timeout,
- mResultCondition.wait_until(l, timeout));
+ sp<GraphicBuffer> gb = new GraphicBuffer(
+ previewStream.width, previewStream.height,
+ static_cast<int32_t>(halStreamConfig.streams[0].overrideFormat),
+ 1, android_convertGralloc1To0Usage(
+ halStreamConfig.streams[0].producerUsage,
+ halStreamConfig.streams[0].consumerUsage));
+ ASSERT_NE(nullptr, gb.get());
+ StreamBuffer outputBuffer = {halStreamConfig.streams[0].id,
+ bufferId, hidl_handle(gb->getNativeBuffer()->handle),
+ BufferStatus::OK, nullptr, nullptr};
+ ::android::hardware::hidl_vec<StreamBuffer> outputBuffers = {
+ outputBuffer};
+ const StreamBuffer emptyInputBuffer = {-1, 0, nullptr,
+ BufferStatus::ERROR, nullptr, nullptr};
+ CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */,
+ settings, emptyInputBuffer, outputBuffers};
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ mInflightMap.clear();
+ mInflightMap.add(frameNumber, &inflightReq);
}
- if (mErrors.empty()) {
- ASSERT_EQ(BufferStatus::OK, mResultBuffers[0].status);
- ASSERT_EQ(previewStream.id, mResultBuffers[0].streamId);
- } else {
- for (auto &error : mErrors) {
- switch (error.errorCode) {
+ Status status = Status::INTERNAL_ERROR;
+ uint32_t numRequestProcessed = 0;
+ hidl_vec<BufferCache> cachesToRemove;
+ ret = session->processCaptureRequest(
+ {request},
+ cachesToRemove,
+ [&status, &numRequestProcessed] (auto s, uint32_t n) {
+ status = s;
+ numRequestProcessed = n;
+ });
+
+ ASSERT_TRUE(ret.isOk());
+ ASSERT_EQ(Status::OK, status);
+ ASSERT_EQ(numRequestProcessed, 1u);
+ //Flush before waiting for request to complete.
+ Return<Status> returnStatus = session->flush();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ while (!inflightReq.errorCodeValid &&
+ ((0 < inflightReq.numBuffersLeft) ||
+ (!inflightReq.haveResultMetadata))) {
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::seconds(kStreamBufferTimeoutSec);
+ ASSERT_NE(std::cv_status::timeout,
+ mResultCondition.wait_until(l, timeout));
+ }
+
+ if (!inflightReq.errorCodeValid) {
+ ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u);
+ ASSERT_EQ(previewStream.id,
+ inflightReq.resultOutputBuffers[0].streamId);
+ } else {
+ switch (inflightReq.errorCode) {
case ErrorCode::ERROR_REQUEST:
case ErrorCode::ERROR_RESULT:
- //Expected
- break;
case ErrorCode::ERROR_BUFFER:
- //Expected as well
- ASSERT_EQ(frameNumber, error.frameNumber);
- ASSERT_EQ(previewStream.id, error.errorStreamId);
+ //Expected
break;
case ErrorCode::ERROR_DEVICE:
default:
- FAIL() <<"Unexpected error:" << static_cast<uint32_t> (error.errorCode);
+ FAIL() << "Unexpected error:" << static_cast<uint32_t> (
+ inflightReq.errorCode);
}
}
+
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
-
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
}
}
}
// Verify that camera flushes correctly without any pending requests.
TEST_F(CameraHidlTest, flushEmpty) {
- CameraHidlEnvironment* env = CameraHidlEnvironment::Instance();
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames();
- std::vector<AvailableStream> outputPreviewStreams;
- AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
- static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
+ for (auto provider : CameraHidlEnvironment::Instance()->mProviders) {
+ hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(
+ provider.second);
+ std::vector<AvailableStream> outputPreviewStreams;
+ AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight,
+ static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)};
- for (const auto& name : cameraDeviceNames) {
- if (getCameraDeviceVersion(name) == CAMERA_DEVICE_API_VERSION_3_2) {
- Stream previewStream;
- HalStreamConfiguration halStreamConfig;
- sp<ICameraDeviceSession> session;
- configurePreviewStream(name, env, &previewThreshold,
- &session /*out*/, &previewStream /*out*/,
- &halStreamConfig /*out*/);
+ for (const auto& name : cameraDeviceNames) {
+ if (getCameraDeviceVersion(name, provider.first) ==
+ CAMERA_DEVICE_API_VERSION_3_2) {
+ Stream previewStream;
+ HalStreamConfiguration halStreamConfig;
+ sp<ICameraDeviceSession> session;
+ bool supportsPartialResults = false;
+ uint32_t partialResultCount = 0;
+ configurePreviewStream(name, provider.second, &previewThreshold,
+ &session /*out*/, &previewStream /*out*/,
+ &halStreamConfig /*out*/, &supportsPartialResults /*out*/,
+ &partialResultCount /*out*/);
- {
- std::unique_lock<std::mutex> l(mLock);
- mResultBuffers.clear();
- mErrors.clear();
- mResultFrameNumber = 0;
+ Return<Status> returnStatus = session->flush();
+ ASSERT_TRUE(returnStatus.isOk());
+ ASSERT_EQ(Status::OK, returnStatus);
+
+ {
+ std::unique_lock<std::mutex> l(mLock);
+ auto timeout = std::chrono::system_clock::now() +
+ std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
+ ASSERT_EQ(std::cv_status::timeout,
+ mResultCondition.wait_until(l, timeout));
+ }
+
+ Return<void> ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
-
- Return<Status> returnStatus = session->flush();
- ASSERT_TRUE(returnStatus.isOk());
- ASSERT_EQ(Status::OK, returnStatus);
-
- {
- std::unique_lock<std::mutex> l(mLock);
- auto timeout = std::chrono::system_clock::now() +
- std::chrono::milliseconds(kEmptyFlushTimeoutMSec);
- ASSERT_EQ(std::cv_status::timeout,
- mResultCondition.wait_until(l, timeout));
- ASSERT_TRUE(mErrors.empty());
- ASSERT_TRUE(mResultBuffers.empty());
- }
-
- Return<void> ret = session->close();
- ASSERT_TRUE(ret.isOk());
}
}
}
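
The flushEmpty test above relies on the condition-variable wait timing out: with no requests in flight, flush() must not trigger any result or error callback within kEmptyFlushTimeoutMSec. A minimal standalone sketch of that wait-for-silence check follows; gLock, gResultCondition and noCallbackWithin are illustrative names, not identifiers from this patch.

    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    std::mutex gLock;
    std::condition_variable gResultCondition;  // notified from the result callback

    // Returns true when nothing signalled the condition within the window
    // (spurious wakeups aside), which is the expected outcome for a flush
    // issued with no pending capture requests.
    bool noCallbackWithin(std::chrono::milliseconds window) {
        std::unique_lock<std::mutex> l(gLock);
        auto timeout = std::chrono::system_clock::now() + window;
        return gResultCondition.wait_until(l, timeout) == std::cv_status::timeout;
    }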
@@ -3021,21 +3538,24 @@
// Open a device session and configure a preview stream.
void CameraHidlTest::configurePreviewStream(const std::string &name,
- const CameraHidlEnvironment* env,
+ sp<ICameraProvider> provider,
const AvailableStream *previewThreshold,
sp<ICameraDeviceSession> *session /*out*/,
Stream *previewStream /*out*/,
- HalStreamConfiguration *halStreamConfig /*out*/) {
- ASSERT_NE(nullptr, env);
+ HalStreamConfiguration *halStreamConfig /*out*/,
+ bool *supportsPartialResults /*out*/,
+ uint32_t *partialResultCount /*out*/) {
ASSERT_NE(nullptr, session);
ASSERT_NE(nullptr, previewStream);
ASSERT_NE(nullptr, halStreamConfig);
+ ASSERT_NE(nullptr, supportsPartialResults);
+ ASSERT_NE(nullptr, partialResultCount);
std::vector<AvailableStream> outputPreviewStreams;
::android::sp<ICameraDevice> device3_2;
ALOGI("configureStreams: Testing camera device %s", name.c_str());
Return<void> ret;
- ret = env->mProvider->getCameraDeviceInterface_V3_x(
+ ret = provider->getCameraDeviceInterface_V3_x(
name,
[&](auto status, const auto& device) {
ALOGI("getCameraDeviceInterface_V3_x returns status:%d",
@@ -3067,6 +3587,14 @@
});
ASSERT_TRUE(ret.isOk());
+ camera_metadata_ro_entry entry;
+ auto status = find_camera_metadata_ro_entry(staticMeta,
+ ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
+ if ((0 == status) && (entry.count > 0)) {
+ *partialResultCount = entry.data.i32[0];
+ *supportsPartialResults = (*partialResultCount > 1);
+ }
+
outputPreviewStreams.clear();
auto rc = getAvailableOutputStreams(staticMeta,
outputPreviewStreams, previewThreshold);
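
The hunk above reads ANDROID_REQUEST_PARTIAL_RESULT_COUNT from the static metadata: a value greater than 1 means the HAL may split a capture result into several partial results, which the in-flight tracking added elsewhere in this patch must account for. A distilled sketch of the lookup; the helper name and the include path are illustrative assumptions, not taken from the patch.

    #include <system/camera_metadata.h>

    // Returns the advertised partial result count, defaulting to 1 (a single,
    // final result) when the tag is absent from the static metadata.
    static uint32_t readPartialResultCount(const camera_metadata_t* staticMeta,
                                           bool* supportsPartialResults) {
        camera_metadata_ro_entry entry;
        uint32_t count = 1;
        int rc = find_camera_metadata_ro_entry(
                staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
        if ((0 == rc) && (entry.count > 0)) {
            count = entry.data.i32[0];
        }
        *supportsPartialResults = (count > 1);
        return count;
    }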
@@ -3078,7 +3606,7 @@
static_cast<uint32_t> (outputPreviewStreams[0].width),
static_cast<uint32_t> (outputPreviewStreams[0].height),
static_cast<PixelFormat> (outputPreviewStreams[0].format),
- 0, 0, StreamRotation::ROTATION_0};
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0, StreamRotation::ROTATION_0};
::android::hardware::hidl_vec<Stream> streams = {*previewStream};
StreamConfiguration config = {streams,
StreamConfigurationMode::NORMAL_MODE};
@@ -3093,17 +3621,16 @@
// Open a device session with empty callbacks and return static metadata.
void CameraHidlTest::openEmptyDeviceSession(const std::string &name,
- const CameraHidlEnvironment* env,
+ sp<ICameraProvider> provider,
sp<ICameraDeviceSession> *session /*out*/,
camera_metadata_t **staticMeta /*out*/) {
- ASSERT_NE(nullptr, env);
ASSERT_NE(nullptr, session);
ASSERT_NE(nullptr, staticMeta);
::android::sp<ICameraDevice> device3_2;
ALOGI("configureStreams: Testing camera device %s", name.c_str());
Return<void> ret;
- ret = env->mProvider->getCameraDeviceInterface_V3_x(
+ ret = provider->getCameraDeviceInterface_V3_x(
name,
[&](auto status, const auto& device) {
ALOGI("getCameraDeviceInterface_V3_x returns status:%d",
@@ -3135,13 +3662,12 @@
// Open a particular camera device.
void CameraHidlTest::openCameraDevice(const std::string &name,
- const CameraHidlEnvironment* env,
+ sp<ICameraProvider> provider,
sp<::android::hardware::camera::device::V1_0::ICameraDevice> *device1 /*out*/) {
- ASSERT_TRUE(nullptr != env);
ASSERT_TRUE(nullptr != device1);
Return<void> ret;
- ret = env->mProvider->getCameraDeviceInterface_V1_x(
+ ret = provider->getCameraDeviceInterface_V1_x(
name,
[&](auto status, const auto& device) {
ALOGI("getCameraDeviceInterface_V1_x returns status:%d",
diff --git a/current.txt b/current.txt
index e714b62..0627b6c 100644
--- a/current.txt
+++ b/current.txt
@@ -190,3 +190,4 @@
# ABI preserving changes to HALs released in Android O
78589343d8ee2e1b155acad3fbdc7fcbb6af94491aee968b2383c21627264f8b android.hardware.radio@1.0::IRadioResponse
+c2c50ec74c87a583c683b4493f8f9f2e454a8d41c57af5b3eb88823a999f0ea4 android.hardware.radio@1.0::IRadioResponse
diff --git a/drm/1.0/vts/functional/drm_hal_clearkey_test.cpp b/drm/1.0/vts/functional/drm_hal_clearkey_test.cpp
index 04f2658..c27ae62 100644
--- a/drm/1.0/vts/functional/drm_hal_clearkey_test.cpp
+++ b/drm/1.0/vts/functional/drm_hal_clearkey_test.cpp
@@ -85,6 +85,10 @@
0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x70, 0x80,
0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x70, 0x80};
+static const uint32_t k256SubSampleByteCount = 256;
+static const uint32_t k512SubSampleClearBytes = 512;
+static const uint32_t k512SubSampleEncryptedBytes = 512;
+
class DrmHalClearkeyFactoryTest : public ::testing::VtsHalHidlTargetTestBase {
public:
virtual void SetUp() override {
@@ -932,6 +936,8 @@
const hidl_vec<SubSample>& subSamples, const vector<uint8_t>& key);
void aes_cbc_decrypt(uint8_t* dest, uint8_t* src, uint8_t* iv,
const hidl_vec<SubSample>& subSamples, const vector<uint8_t>& key);
+ void decryptWithInvalidKeys(hidl_vec<uint8_t>& invalidResponse,
+ vector<uint8_t>& iv, const Pattern& noPattern, const vector<SubSample>& subSamples);
};
void DrmHalClearkeyDecryptTest::fillRandom(const sp<IMemory>& memory) {
@@ -1089,16 +1095,14 @@
EXPECT_OK(res);
}
-
/**
* Positive decrypt test. "Decrypt" a single clear segment
*/
TEST_F(DrmHalClearkeyDecryptTest, ClearSegmentTest) {
vector<uint8_t> iv(AES_BLOCK_SIZE, 0);
const Pattern noPattern = {0, 0};
- const uint32_t kByteCount = 256;
const vector<SubSample> subSamples = {
- {.numBytesOfClearData = kByteCount,
+ {.numBytesOfClearData = k256SubSampleByteCount,
.numBytesOfEncryptedData = 0}};
auto sessionId = openSession();
loadKeys(sessionId);
@@ -1109,7 +1113,7 @@
const bool kNotSecure = false;
uint32_t byteCount = decrypt(Mode::UNENCRYPTED, &iv[0], subSamples,
noPattern, Status::OK);
- EXPECT_EQ(kByteCount, byteCount);
+ EXPECT_EQ(k256SubSampleByteCount, byteCount);
closeSession(sessionId);
}
@@ -1121,12 +1125,9 @@
TEST_F(DrmHalClearkeyDecryptTest, EncryptedAesCtrSegmentTest) {
vector<uint8_t> iv(AES_BLOCK_SIZE, 0);
const Pattern noPattern = {0, 0};
- const uint32_t kClearBytes = 512;
- const uint32_t kEncryptedBytes = 512;
const vector<SubSample> subSamples = {
- {.numBytesOfClearData = kClearBytes,
- .numBytesOfEncryptedData = kEncryptedBytes
- }};
+ {.numBytesOfClearData = k512SubSampleClearBytes,
+ .numBytesOfEncryptedData = k512SubSampleEncryptedBytes}};
auto sessionId = openSession();
loadKeys(sessionId);
@@ -1136,10 +1137,11 @@
const bool kNotSecure = false;
uint32_t byteCount = decrypt(Mode::AES_CTR, &iv[0], subSamples,
noPattern, Status::OK);
- EXPECT_EQ(kClearBytes + kEncryptedBytes, byteCount);
+ EXPECT_EQ(k512SubSampleClearBytes + k512SubSampleEncryptedBytes, byteCount);
closeSession(sessionId);
}
+
/**
* Negative decrypt test. Decrypt without loading keys.
*/
@@ -1147,8 +1149,8 @@
vector<uint8_t> iv(AES_BLOCK_SIZE, 0);
const Pattern noPattern = {0, 0};
const vector<SubSample> subSamples = {
- {.numBytesOfClearData = 256,
- .numBytesOfEncryptedData = 256}};
+ {.numBytesOfClearData = k256SubSampleByteCount,
+ .numBytesOfEncryptedData = k256SubSampleByteCount}};
auto sessionId = openSession();
Status status = cryptoPlugin->setMediaDrmSession(sessionId);
@@ -1161,3 +1163,94 @@
closeSession(sessionId);
}
+
+/**
+ * Helper method to verify that decryption fails when an invalid key response is provided
+ */
+void DrmHalClearkeyDecryptTest::decryptWithInvalidKeys(
+ hidl_vec<uint8_t>& invalidResponse,
+ vector<uint8_t>& iv,
+ const Pattern& noPattern,
+ const vector<SubSample>& subSamples) {
+ auto sessionId = openSession();
+
+ auto res = drmPlugin->provideKeyResponse(
+ sessionId, invalidResponse,
+ [&](Status status, const hidl_vec<uint8_t>& myKeySetId) {
+ EXPECT_EQ(Status::OK, status);
+ EXPECT_EQ(0u, myKeySetId.size());
+ });
+ ASSERT_OK(res);
+
+ ASSERT_TRUE(cryptoPlugin->setMediaDrmSession(sessionId).isOk());
+
+ uint32_t byteCount = decrypt(Mode::AES_CTR, &iv[0], subSamples,
+ noPattern, Status::ERROR_DRM_NO_LICENSE);
+ EXPECT_EQ(0u, byteCount);
+
+ closeSession(sessionId);
+}
+
+/**
+ * Negative decrypt test. Decrypt with an invalid (empty) key.
+ */
+TEST_F(DrmHalClearkeyDecryptTest, DecryptWithEmptyKey) {
+ vector<uint8_t> iv(AES_BLOCK_SIZE, 0);
+ const Pattern noPattern = {0, 0};
+ const vector<SubSample> subSamples = {
+ {.numBytesOfClearData = k512SubSampleClearBytes,
+ .numBytesOfEncryptedData = k512SubSampleEncryptedBytes}};
+
+ // base 64 encoded JSON response string, must not contain padding character '='
+ const hidl_string emptyKeyResponse =
+ "{\"keys\":[" \
+ "{" \
+ "\"kty\":\"oct\"" \
+ "\"alg\":\"A128KW2\"" \
+ "\"k\":\"SGVsbG8gRnJpZW5kIQ\"" \
+ "\"kid\":\"Y2xlYXJrZXlrZXlpZDAyAy\"" \
+ "}" \
+ "{" \
+ "\"kty\":\"oct\"," \
+ "\"alg\":\"A128KW2\"" \
+ "\"kid\":\"Y2xlYXJrZXlrZXlpZDAzAy\"," \
+ // empty key follows
+ "\"k\":\"R\"" \
+ "}]" \
+ "}";
+ const size_t kEmptyKeyResponseSize = emptyKeyResponse.size();
+
+ hidl_vec<uint8_t> invalidResponse;
+ invalidResponse.resize(kEmptyKeyResponseSize);
+ memcpy(invalidResponse.data(), emptyKeyResponse.c_str(), kEmptyKeyResponseSize);
+ decryptWithInvalidKeys(invalidResponse, iv, noPattern, subSamples);
+}
+
+/**
+ * Negative decrypt test. Decrypt with a key that exceeds AES_BLOCK_SIZE.
+ */
+TEST_F(DrmHalClearkeyDecryptTest, DecryptWithKeyTooLong) {
+ vector<uint8_t> iv(AES_BLOCK_SIZE, 0);
+ const Pattern noPattern = {0, 0};
+ const vector<SubSample> subSamples = {
+ {.numBytesOfClearData = k512SubSampleClearBytes,
+ .numBytesOfEncryptedData = k512SubSampleEncryptedBytes}};
+
+ // base 64 encoded JSON response string, must not contain padding character '='
+ const hidl_string keyTooLongResponse =
+ "{\"keys\":[" \
+ "{" \
+ "\"kty\":\"oct\"," \
+ "\"alg\":\"A128KW2\"" \
+ "\"kid\":\"Y2xlYXJrZXlrZXlpZDAzAy\"," \
+ // key too long
+ "\"k\":\"V2lubmllIHRoZSBwb29oIVdpbm5pZSB0aGUgcG9vaCE=\"" \
+ "}]" \
+ "}";
+ const size_t kKeyTooLongResponseSize = keyTooLongResponse.size();
+
+ hidl_vec<uint8_t> invalidResponse;
+ invalidResponse.resize(kKeyTooLongResponseSize);
+ memcpy(invalidResponse.data(), keyTooLongResponse.c_str(), kKeyTooLongResponseSize);
+ decryptWithInvalidKeys(invalidResponse, iv, noPattern, subSamples);
+}
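
Both negative tests above feed the plugin deliberately malformed JSON: missing commas and a "k" value that is either empty or longer than AES_BLOCK_SIZE. For contrast, a syntactically well-formed clearkey response keeps the same shape but with comma-separated fields and base64url key data without '=' padding. The values below are illustrative only and are not taken from this patch.

    // Illustrative 128-bit key: "Hello Friend OMA" (16 bytes), base64url
    // encoded without padding.
    const hidl_string wellFormedKeyResponse =
            "{\"keys\":["
                "{"
                    "\"kty\":\"oct\","
                    "\"kid\":\"Y2xlYXJrZXlrZXlpZDAy\","
                    "\"k\":\"SGVsbG8gRnJpZW5kIE9NQQ\""
                "}]"
            "}";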
diff --git a/drm/1.0/vts/functional/drm_hal_vendor_test.cpp b/drm/1.0/vts/functional/drm_hal_vendor_test.cpp
index 33fb6fb..1be362a 100644
--- a/drm/1.0/vts/functional/drm_hal_vendor_test.cpp
+++ b/drm/1.0/vts/functional/drm_hal_vendor_test.cpp
@@ -129,7 +129,7 @@
// Do the same for the crypto factory
cryptoFactory = VtsTestBase::getService<ICryptoFactory>(name);
if (cryptoFactory == nullptr) {
- VtsTestBase::getService<ICryptoFactory>();
+ cryptoFactory = VtsTestBase::getService<ICryptoFactory>();
}
ASSERT_NE(cryptoFactory, nullptr);
@@ -1598,9 +1598,8 @@
#endif
gVendorModules = new drm_vts::VendorModules(kModulePath);
if (gVendorModules->getPathList().size() == 0) {
- std::cerr << "No vendor modules found in " << kModulePath <<
- ", exiting" << std::endl;
- exit(-1);
+ std::cerr << "WARNING: No vendor modules found in " << kModulePath <<
+ ", all vendor tests will be skipped" << std::endl;
}
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
diff --git a/gnss/1.0/vts/functional/VtsHalGnssV1_0TargetTest.cpp b/gnss/1.0/vts/functional/VtsHalGnssV1_0TargetTest.cpp
index 53181f1..c90c53d 100644
--- a/gnss/1.0/vts/functional/VtsHalGnssV1_0TargetTest.cpp
+++ b/gnss/1.0/vts/functional/VtsHalGnssV1_0TargetTest.cpp
@@ -208,10 +208,13 @@
* CheckLocation:
* Helper function to vet Location fields
*/
-void CheckLocation(GnssLocation& location, bool checkAccuracies) {
+void CheckLocation(GnssLocation& location, bool checkAccuracies,
+ bool checkSpeed) {
EXPECT_TRUE(location.gnssLocationFlags & GnssLocationFlags::HAS_LAT_LONG);
EXPECT_TRUE(location.gnssLocationFlags & GnssLocationFlags::HAS_ALTITUDE);
- EXPECT_TRUE(location.gnssLocationFlags & GnssLocationFlags::HAS_SPEED);
+ if (checkSpeed) {
+ EXPECT_TRUE(location.gnssLocationFlags & GnssLocationFlags::HAS_SPEED);
+ }
EXPECT_TRUE(location.gnssLocationFlags &
GnssLocationFlags::HAS_HORIZONTAL_ACCURACY);
// New uncertainties available in O must be provided,
@@ -232,12 +235,15 @@
EXPECT_LE(location.longitudeDegrees, 180.0);
EXPECT_GE(location.altitudeMeters, -1000.0);
EXPECT_LE(location.altitudeMeters, 30000.0);
- EXPECT_GE(location.speedMetersPerSec, 0.0);
- EXPECT_LE(location.speedMetersPerSec, 5.0); // VTS tests are stationary.
+ if (checkSpeed) {
+ // VTS tests are stationary. 5.0m/s max allows for measurement noise.
+ EXPECT_GE(location.speedMetersPerSec, 0.0);
+ EXPECT_LE(location.speedMetersPerSec, 5.0);
- // Non-zero speeds must be reported with an associated bearing
- if (location.speedMetersPerSec > 0.0) {
- EXPECT_TRUE(location.gnssLocationFlags & GnssLocationFlags::HAS_BEARING);
+ // Non-zero speeds must be reported with an associated bearing
+ if (location.speedMetersPerSec > 0.0) {
+ EXPECT_TRUE(location.gnssLocationFlags & GnssLocationFlags::HAS_BEARING);
+ }
}
/*
@@ -299,7 +305,8 @@
EXPECT_EQ(test->location_called_count_, 1);
}
if (test->location_called_count_ > 0) {
- CheckLocation(test->last_location_, checkAccuracies);
+ // don't require speed on first fix
+ CheckLocation(test->last_location_, checkAccuracies, false /* checkSpeed */ );
return true;
}
return false;
@@ -340,7 +347,7 @@
EXPECT_EQ(std::cv_status::no_timeout,
wait(LOCATION_TIMEOUT_SUBSEQUENT_SEC));
EXPECT_EQ(location_called_count_, i + 1);
- CheckLocation(last_location_, checkMoreAccuracies);
+ CheckLocation(last_location_, checkMoreAccuracies, true /* checkSpeed */);
}
}
@@ -450,4 +457,4 @@
int status = RUN_ALL_TESTS();
ALOGI("Test result = %d", status);
return status;
-}
\ No newline at end of file
+}
diff --git a/ir/1.0/vts/functional/VtsHalIrV1_0TargetTest.cpp b/ir/1.0/vts/functional/VtsHalIrV1_0TargetTest.cpp
index 3dad3c1..a017404 100644
--- a/ir/1.0/vts/functional/VtsHalIrV1_0TargetTest.cpp
+++ b/ir/1.0/vts/functional/VtsHalIrV1_0TargetTest.cpp
@@ -59,7 +59,7 @@
uint32_t len = 16;
hidl_vec<int32_t> vec;
vec.resize(len);
- std::fill(vec.begin(), vec.end(), 1);
+ std::fill(vec.begin(), vec.end(), 1000);
for (auto range = ranges.begin(); range != ranges.end(); range++) {
EXPECT_TRUE(ir->transmit(range->min, vec));
EXPECT_TRUE(ir->transmit(range->max, vec));
@@ -74,7 +74,6 @@
vec.resize(len);
std::fill(vec.begin(), vec.end(), 1);
EXPECT_FALSE(ir->transmit(-1, vec));
- EXPECT_FALSE(ir->transmit(0, vec));
}
int main(int argc, char **argv) {
diff --git a/keymaster/3.0/vts/functional/attestation_record.cpp b/keymaster/3.0/vts/functional/attestation_record.cpp
index 5d96fff..a428989 100644
--- a/keymaster/3.0/vts/functional/attestation_record.cpp
+++ b/keymaster/3.0/vts/functional/attestation_record.cpp
@@ -274,10 +274,12 @@
*keymaster_security_level =
static_cast<SecurityLevel>(ASN1_ENUMERATED_get(record->keymaster_security_level));
- attestation_challenge->setToExternal(record->attestation_challenge->data,
- record->attestation_challenge->length);
-
- unique_id->setToExternal(record->unique_id->data, record->unique_id->length);
+ auto& chall = record->attestation_challenge;
+ attestation_challenge->resize(chall->length);
+ memcpy(attestation_challenge->data(), chall->data, chall->length);
+ auto& uid = record->unique_id;
+ unique_id->resize(uid->length);
+ memcpy(unique_id->data(), uid->data, uid->length);
ErrorCode error = extract_auth_list(record->software_enforced, software_enforced);
if (error != ErrorCode::OK) return error;
diff --git a/keymaster/3.0/vts/functional/keymaster_hidl_hal_test.cpp b/keymaster/3.0/vts/functional/keymaster_hidl_hal_test.cpp
index b950765..e1f8350 100644
--- a/keymaster/3.0/vts/functional/keymaster_hidl_hal_test.cpp
+++ b/keymaster/3.0/vts/functional/keymaster_hidl_hal_test.cpp
@@ -1730,7 +1730,7 @@
.Authorization(TAG_NO_AUTH_REQUIRED)
.EcdsaSigningKey(224)
.Digest(Digest::NONE)));
- string message(64 * 1024, 'a');
+ string message(2 * 1024, 'a');
SignMessage(message, AuthorizationSetBuilder().Digest(Digest::NONE));
}
@@ -2564,7 +2564,7 @@
}
/*
- * EncryptionOperationsTest.RsaNoPaddingTooLong
+ * EncryptionOperationsTest.RsaNoPaddingTooLarge
*
* Verifies that raw RSA encryption of too-large (numerically) messages fails in the expected way.
*/
@@ -3864,7 +3864,7 @@
* Verifies that the addRngEntropy method doesn't blow up when given a largish amount of data.
*/
TEST_F(AddEntropyTest, AddLargeEntropy) {
- EXPECT_EQ(ErrorCode::OK, keymaster().addRngEntropy(HidlBuf(string(16 * 1024, 'a'))));
+ EXPECT_EQ(ErrorCode::OK, keymaster().addRngEntropy(HidlBuf(string(2 * 1024, 'a'))));
}
typedef KeymasterHidlTest AttestationTest;
diff --git a/media/omx/1.0/vts/functional/audio/VtsHalMediaOmxV1_0TargetAudioDecTest.cpp b/media/omx/1.0/vts/functional/audio/VtsHalMediaOmxV1_0TargetAudioDecTest.cpp
index fe67cb9..9e5dde1 100644
--- a/media/omx/1.0/vts/functional/audio/VtsHalMediaOmxV1_0TargetAudioDecTest.cpp
+++ b/media/omx/1.0/vts/functional/audio/VtsHalMediaOmxV1_0TargetAudioDecTest.cpp
@@ -474,15 +474,20 @@
// port settings reconfiguration during runtime. reconfigures sample rate and
// number
+typedef struct {
+ OMX_AUDIO_CODINGTYPE eEncoding;
+ AudioDecHidlTest::standardComp comp;
+} packedArgs;
void portReconfiguration(sp<IOmxNode> omxNode, sp<CodecObserver> observer,
android::Vector<BufferInfo>* iBuffer,
android::Vector<BufferInfo>* oBuffer,
- OMX_AUDIO_CODINGTYPE eEncoding,
OMX_U32 kPortIndexInput, OMX_U32 kPortIndexOutput,
- Message msg,
- AudioDecHidlTest::standardComp comp =
- AudioDecHidlTest::standardComp::unknown_comp) {
+ Message msg, PortMode oPortMode, void* args) {
android::hardware::media::omx::V1_0::Status status;
+ packedArgs* audioArgs = static_cast<packedArgs*>(args);
+ OMX_AUDIO_CODINGTYPE eEncoding = audioArgs->eEncoding;
+ AudioDecHidlTest::standardComp comp = audioArgs->comp;
+ (void)oPortMode;
if (msg.data.eventData.event == OMX_EventPortSettingsChanged) {
ASSERT_EQ(msg.data.eventData.data1, kPortIndexOutput);
@@ -568,7 +573,8 @@
android::Vector<BufferInfo>* iBuffer,
android::Vector<BufferInfo>* oBuffer,
OMX_AUDIO_CODINGTYPE eEncoding,
- OMX_U32 kPortIndexInput, OMX_U32 kPortIndexOutput) {
+ OMX_U32 kPortIndexInput, OMX_U32 kPortIndexOutput,
+ AudioDecHidlTest::standardComp comp) {
android::hardware::media::omx::V1_0::Status status;
Message msg;
int timeOut = TIMEOUT_COUNTER;
@@ -579,8 +585,10 @@
observer->dequeueMessage(&msg, DEFAULT_TIMEOUT, iBuffer, oBuffer);
if (status == android::hardware::media::omx::V1_0::Status::OK) {
EXPECT_EQ(msg.type, Message::Type::EVENT);
- portReconfiguration(omxNode, observer, iBuffer, oBuffer, eEncoding,
- kPortIndexInput, kPortIndexOutput, msg);
+ packedArgs audioArgs = {eEncoding, comp};
+ portReconfiguration(omxNode, observer, iBuffer, oBuffer,
+ kPortIndexInput, kPortIndexOutput, msg,
+ PortMode::PRESET_BYTE_BUFFER, &audioArgs);
}
// status == TIMED_OUT, it could be due to process time being large
// than DEFAULT_TIMEOUT or component needs output buffers to start
@@ -644,8 +652,10 @@
// Port Reconfiguration
if (status == android::hardware::media::omx::V1_0::Status::OK &&
msg.type == Message::Type::EVENT) {
- portReconfiguration(omxNode, observer, iBuffer, oBuffer, eEncoding,
- kPortIndexInput, kPortIndexOutput, msg, comp);
+ packedArgs audioArgs = {eEncoding, comp};
+ portReconfiguration(omxNode, observer, iBuffer, oBuffer,
+ kPortIndexInput, kPortIndexOutput, msg,
+ PortMode::PRESET_BYTE_BUFFER, &audioArgs);
}
if (frameID == (int)Info->size() || frameID == (offset + range)) break;
@@ -745,13 +755,13 @@
int bytesCount = 0;
uint32_t flags = 0;
uint32_t timestamp = 0;
- timestampDevTest = true;
+ timestampDevTest = false;
while (1) {
if (!(eleInfo >> bytesCount)) break;
eleInfo >> flags;
eleInfo >> timestamp;
Info.push_back({bytesCount, flags, timestamp});
- if (flags != OMX_BUFFERFLAG_CODECCONFIG)
+ if (timestampDevTest && (flags != OMX_BUFFERFLAG_CODECCONFIG))
timestampUslist.push_back(timestamp);
}
eleInfo.close();
@@ -789,9 +799,11 @@
(int)Info.size(), compName);
eleStream.close();
waitOnInputConsumption(omxNode, observer, &iBuffer, &oBuffer, eEncoding,
- kPortIndexInput, kPortIndexOutput);
- testEOS(omxNode, observer, &iBuffer, &oBuffer, false, eosFlag);
- EXPECT_EQ(timestampUslist.empty(), true);
+ kPortIndexInput, kPortIndexOutput, compName);
+ packedArgs audioArgs = {eEncoding, compName};
+ testEOS(omxNode, observer, &iBuffer, &oBuffer, false, eosFlag, nullptr,
+ portReconfiguration, kPortIndexInput, kPortIndexOutput, &audioArgs);
+ if (timestampDevTest) EXPECT_EQ(timestampUslist.empty(), true);
// set state to idle
changeStateExecutetoIdle(omxNode, observer, &iBuffer, &oBuffer);
// set state to executing
@@ -846,7 +858,9 @@
changeStateIdletoExecute(omxNode, observer);
// request EOS at the start
- testEOS(omxNode, observer, &iBuffer, &oBuffer, true, eosFlag);
+ packedArgs audioArgs = {eEncoding, compName};
+ testEOS(omxNode, observer, &iBuffer, &oBuffer, true, eosFlag, nullptr,
+ portReconfiguration, kPortIndexInput, kPortIndexOutput, &audioArgs);
flushPorts(omxNode, observer, &iBuffer, &oBuffer, kPortIndexInput,
kPortIndexOutput);
EXPECT_GE(framesReceived, 0U);
@@ -933,8 +947,10 @@
compName);
eleStream.close();
waitOnInputConsumption(omxNode, observer, &iBuffer, &oBuffer, eEncoding,
- kPortIndexInput, kPortIndexOutput);
- testEOS(omxNode, observer, &iBuffer, &oBuffer, false, eosFlag);
+ kPortIndexInput, kPortIndexOutput, compName);
+ packedArgs audioArgs = {eEncoding, compName};
+ testEOS(omxNode, observer, &iBuffer, &oBuffer, false, eosFlag, nullptr,
+ portReconfiguration, kPortIndexInput, kPortIndexOutput, &audioArgs);
flushPorts(omxNode, observer, &iBuffer, &oBuffer, kPortIndexInput,
kPortIndexOutput);
EXPECT_GE(framesReceived, 1U);
@@ -949,8 +965,9 @@
compName, false);
eleStream.close();
waitOnInputConsumption(omxNode, observer, &iBuffer, &oBuffer, eEncoding,
- kPortIndexInput, kPortIndexOutput);
- testEOS(omxNode, observer, &iBuffer, &oBuffer, true, eosFlag);
+ kPortIndexInput, kPortIndexOutput, compName);
+ testEOS(omxNode, observer, &iBuffer, &oBuffer, true, eosFlag, nullptr,
+ portReconfiguration, kPortIndexInput, kPortIndexOutput, &audioArgs);
flushPorts(omxNode, observer, &iBuffer, &oBuffer, kPortIndexInput,
kPortIndexOutput);
EXPECT_GE(framesReceived, 1U);
@@ -1034,8 +1051,10 @@
(int)Info.size(), compName, false);
eleStream.close();
waitOnInputConsumption(omxNode, observer, &iBuffer, &oBuffer, eEncoding,
- kPortIndexInput, kPortIndexOutput);
- testEOS(omxNode, observer, &iBuffer, &oBuffer, true, eosFlag);
+ kPortIndexInput, kPortIndexOutput, compName);
+ packedArgs audioArgs = {eEncoding, compName};
+ testEOS(omxNode, observer, &iBuffer, &oBuffer, true, eosFlag, nullptr,
+ portReconfiguration, kPortIndexInput, kPortIndexOutput, &audioArgs);
flushPorts(omxNode, observer, &iBuffer, &oBuffer, kPortIndexInput,
kPortIndexOutput);
framesReceived = 0;
diff --git a/media/omx/1.0/vts/functional/audio/media_audio_hidl_test_common.cpp b/media/omx/1.0/vts/functional/audio/media_audio_hidl_test_common.cpp
index 7240964..4c68219 100644
--- a/media/omx/1.0/vts/functional/audio/media_audio_hidl_test_common.cpp
+++ b/media/omx/1.0/vts/functional/audio/media_audio_hidl_test_common.cpp
@@ -46,47 +46,6 @@
#include <media_hidl_test_common.h>
#include <memory>
-Return<android::hardware::media::omx::V1_0::Status> setAudioPortFormat(
- sp<IOmxNode> omxNode, OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE eEncoding) {
- OMX_U32 index = 0;
- OMX_AUDIO_PARAM_PORTFORMATTYPE portFormat;
- std::vector<OMX_AUDIO_CODINGTYPE> arrEncoding;
- android::hardware::media::omx::V1_0::Status status;
-
- while (1) {
- portFormat.nIndex = index;
- status = getPortParam(omxNode, OMX_IndexParamAudioPortFormat, portIndex,
- &portFormat);
- if (status != ::android::hardware::media::omx::V1_0::Status::OK) break;
- arrEncoding.push_back(portFormat.eEncoding);
- index++;
- if (index == 512) {
- // enumerated way too many formats, highly unusual for this to
- // happen.
- EXPECT_LE(index, 512U)
- << "Expecting OMX_ErrorNoMore but not received";
- break;
- }
- }
- if (!index) return status;
- for (index = 0; index < arrEncoding.size(); index++) {
- if (arrEncoding[index] == eEncoding) {
- portFormat.eEncoding = arrEncoding[index];
- break;
- }
- }
- if (index == arrEncoding.size()) {
- ALOGE("setting default Port format %x", (int)arrEncoding[0]);
- portFormat.eEncoding = arrEncoding[0];
- }
- // In setParam call nIndex shall be ignored as per omx-il specification.
- // see how this holds up by corrupting nIndex
- portFormat.nIndex = RANDOM_INDEX;
- status = setPortParam(omxNode, OMX_IndexParamAudioPortFormat, portIndex,
- &portFormat);
- return status;
-}
-
void enumerateProfile(sp<IOmxNode> omxNode, OMX_U32 portIndex,
std::vector<int32_t>* arrProfile) {
android::hardware::media::omx::V1_0::Status status;
diff --git a/media/omx/1.0/vts/functional/audio/media_audio_hidl_test_common.h b/media/omx/1.0/vts/functional/audio/media_audio_hidl_test_common.h
index 70142f2..08b3d9c 100644
--- a/media/omx/1.0/vts/functional/audio/media_audio_hidl_test_common.h
+++ b/media/omx/1.0/vts/functional/audio/media_audio_hidl_test_common.h
@@ -27,9 +27,6 @@
/*
* Common audio utils
*/
-Return<android::hardware::media::omx::V1_0::Status> setAudioPortFormat(
- sp<IOmxNode> omxNode, OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE eEncoding);
-
void enumerateProfile(sp<IOmxNode> omxNode, OMX_U32 portIndex,
std::vector<int32_t>* arrProfile);
diff --git a/media/omx/1.0/vts/functional/common/Android.bp b/media/omx/1.0/vts/functional/common/Android.bp
old mode 100755
new mode 100644
diff --git a/media/omx/1.0/vts/functional/common/media_hidl_test_common.cpp b/media/omx/1.0/vts/functional/common/media_hidl_test_common.cpp
old mode 100755
new mode 100644
index e81e6dd..1f67e2b
--- a/media/omx/1.0/vts/functional/common/media_hidl_test_common.cpp
+++ b/media/omx/1.0/vts/functional/common/media_hidl_test_common.cpp
@@ -60,6 +60,132 @@
     return setParam(omxNode, OMX_IndexParamStandardComponentRole, &params);
}
+Return<android::hardware::media::omx::V1_0::Status> setPortBufferSize(
+ sp<IOmxNode> omxNode, OMX_U32 portIndex, OMX_U32 size) {
+ android::hardware::media::omx::V1_0::Status status;
+ OMX_PARAM_PORTDEFINITIONTYPE portDef;
+
+ status = getPortParam(omxNode, OMX_IndexParamPortDefinition, portIndex,
+ &portDef);
+ if (status != ::android::hardware::media::omx::V1_0::Status::OK)
+ return status;
+ if (portDef.nBufferSize < size) {
+ portDef.nBufferSize = size;
+ status = setPortParam(omxNode, OMX_IndexParamPortDefinition, portIndex,
+ &portDef);
+ if (status != ::android::hardware::media::omx::V1_0::Status::OK)
+ return status;
+ }
+ return status;
+}
+
+// get/set video component port format
+Return<android::hardware::media::omx::V1_0::Status> setVideoPortFormat(
+ sp<IOmxNode> omxNode, OMX_U32 portIndex,
+ OMX_VIDEO_CODINGTYPE eCompressionFormat, OMX_COLOR_FORMATTYPE eColorFormat,
+ OMX_U32 xFramerate) {
+ OMX_U32 index = 0;
+ OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
+ std::vector<OMX_COLOR_FORMATTYPE> arrColorFormat;
+ std::vector<OMX_VIDEO_CODINGTYPE> arrCompressionFormat;
+ android::hardware::media::omx::V1_0::Status status;
+
+ while (1) {
+ portFormat.nIndex = index;
+ status = getPortParam(omxNode, OMX_IndexParamVideoPortFormat, portIndex,
+ &portFormat);
+ if (status != ::android::hardware::media::omx::V1_0::Status::OK) break;
+ if (eCompressionFormat == OMX_VIDEO_CodingUnused)
+ arrColorFormat.push_back(portFormat.eColorFormat);
+ else
+ arrCompressionFormat.push_back(portFormat.eCompressionFormat);
+ index++;
+ if (index == 512) {
+ // enumerated way too many formats, highly unusual for this to
+ // happen.
+ EXPECT_LE(index, 512U)
+ << "Expecting OMX_ErrorNoMore but not received";
+ break;
+ }
+ }
+ if (!index) return status;
+ if (eCompressionFormat == OMX_VIDEO_CodingUnused) {
+ for (index = 0; index < arrColorFormat.size(); index++) {
+ if (arrColorFormat[index] == eColorFormat) {
+ portFormat.eColorFormat = arrColorFormat[index];
+ break;
+ }
+ }
+ if (index == arrColorFormat.size()) {
+ ALOGE("setting default color format %x", (int)arrColorFormat[0]);
+ portFormat.eColorFormat = arrColorFormat[0];
+ }
+ portFormat.eCompressionFormat = OMX_VIDEO_CodingUnused;
+ } else {
+ for (index = 0; index < arrCompressionFormat.size(); index++) {
+ if (arrCompressionFormat[index] == eCompressionFormat) {
+ portFormat.eCompressionFormat = arrCompressionFormat[index];
+ break;
+ }
+ }
+ if (index == arrCompressionFormat.size()) {
+ ALOGE("setting default compression format %x",
+ (int)arrCompressionFormat[0]);
+ portFormat.eCompressionFormat = arrCompressionFormat[0];
+ }
+ portFormat.eColorFormat = OMX_COLOR_FormatUnused;
+ }
+ // In setParam call nIndex shall be ignored as per omx-il specification.
+ // see how this holds up by corrupting nIndex
+ portFormat.nIndex = RANDOM_INDEX;
+ portFormat.xFramerate = xFramerate;
+ status = setPortParam(omxNode, OMX_IndexParamVideoPortFormat, portIndex,
+ &portFormat);
+ return status;
+}
+
+// get/set audio component port format
+Return<android::hardware::media::omx::V1_0::Status> setAudioPortFormat(
+ sp<IOmxNode> omxNode, OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE eEncoding) {
+ OMX_U32 index = 0;
+ OMX_AUDIO_PARAM_PORTFORMATTYPE portFormat;
+ std::vector<OMX_AUDIO_CODINGTYPE> arrEncoding;
+ android::hardware::media::omx::V1_0::Status status;
+
+ while (1) {
+ portFormat.nIndex = index;
+ status = getPortParam(omxNode, OMX_IndexParamAudioPortFormat, portIndex,
+ &portFormat);
+ if (status != ::android::hardware::media::omx::V1_0::Status::OK) break;
+ arrEncoding.push_back(portFormat.eEncoding);
+ index++;
+ if (index == 512) {
+ // enumerated way too many formats, highly unusual for this to
+ // happen.
+ EXPECT_LE(index, 512U)
+ << "Expecting OMX_ErrorNoMore but not received";
+ break;
+ }
+ }
+ if (!index) return status;
+ for (index = 0; index < arrEncoding.size(); index++) {
+ if (arrEncoding[index] == eEncoding) {
+ portFormat.eEncoding = arrEncoding[index];
+ break;
+ }
+ }
+ if (index == arrEncoding.size()) {
+ ALOGE("setting default Port format %x", (int)arrEncoding[0]);
+ portFormat.eEncoding = arrEncoding[0];
+ }
+ // In setParam call nIndex shall be ignored as per omx-il specification.
+ // see how this holds up by corrupting nIndex
+ portFormat.nIndex = RANDOM_INDEX;
+ status = setPortParam(omxNode, OMX_IndexParamAudioPortFormat, portIndex,
+ &portFormat);
+ return status;
+}
+
// allocate buffers needed on a component port
void allocatePortBuffers(sp<IOmxNode> omxNode,
android::Vector<BufferInfo>* buffArray,
@@ -406,7 +532,8 @@
void testEOS(sp<IOmxNode> omxNode, sp<CodecObserver> observer,
android::Vector<BufferInfo>* iBuffer,
android::Vector<BufferInfo>* oBuffer, bool signalEOS,
- bool& eosFlag, PortMode* portMode) {
+ bool& eosFlag, PortMode* portMode, portreconfig fptr,
+ OMX_U32 kPortIndexInput, OMX_U32 kPortIndexOutput, void* args) {
android::hardware::media::omx::V1_0::Status status;
PortMode defaultPortMode[2], *pm;
@@ -443,9 +570,15 @@
status =
observer->dequeueMessage(&msg, DEFAULT_TIMEOUT, iBuffer, oBuffer);
if (status == android::hardware::media::omx::V1_0::Status::OK) {
- if (msg.data.eventData.event == OMX_EventBufferFlag) {
- // soft omx components donot send this, we will just ignore it
- // for now
+ if (msg.data.eventData.event == OMX_EventPortSettingsChanged) {
+ if (fptr) {
+ (*fptr)(omxNode, observer, iBuffer, oBuffer,
+ kPortIndexInput, kPortIndexOutput, msg, pm[1],
+ args);
+ } else {
+ // something unexpected happened
+ EXPECT_TRUE(false);
+ }
} else {
// something unexpected happened
EXPECT_TRUE(false);
diff --git a/media/omx/1.0/vts/functional/common/media_hidl_test_common.h b/media/omx/1.0/vts/functional/common/media_hidl_test_common.h
index d617e45..0adea14 100644
--- a/media/omx/1.0/vts/functional/common/media_hidl_test_common.h
+++ b/media/omx/1.0/vts/functional/common/media_hidl_test_common.h
@@ -36,6 +36,14 @@
#define DEFAULT_TIMEOUT 100000
#define TIMEOUT_COUNTER (10000000 / DEFAULT_TIMEOUT)
+/*
+ * Random index used for monkey testing while getting/setting parameters
+ */
+#define RANDOM_INDEX 1729
+
+#define ALIGN_POWER_OF_TWO(value, n) \
+ (((value) + ((1 << (n)) - 1)) & ~((1 << (n)) - 1))
+
enum bufferOwner {
client,
component,
@@ -113,6 +121,12 @@
android::hardware::media::omx::V1_0::Message::Type::EVENT) {
*msg = *it;
msgQueue.erase(it);
+                    // The OMX_EventBufferFlag event is sent when the component
+                    // has processed a buffer with its EOS flag set. Soft OMX
+                    // components do not send this event, but vendor components
+                    // can. From the IOMX point of view, we ignore this event.
+ if (msg->data.eventData.event == OMX_EventBufferFlag) break;
return ::android::hardware::media::omx::V1_0::Status::OK;
} else if (it->type == android::hardware::media::omx::V1_0::
Message::Type::FILL_BUFFER_DONE) {
@@ -259,6 +273,17 @@
Return<android::hardware::media::omx::V1_0::Status> setRole(
sp<IOmxNode> omxNode, const char* role);
+Return<android::hardware::media::omx::V1_0::Status> setPortBufferSize(
+ sp<IOmxNode> omxNode, OMX_U32 portIndex, OMX_U32 size);
+
+Return<android::hardware::media::omx::V1_0::Status> setVideoPortFormat(
+ sp<IOmxNode> omxNode, OMX_U32 portIndex,
+ OMX_VIDEO_CODINGTYPE eCompressionFormat, OMX_COLOR_FORMATTYPE eColorFormat,
+ OMX_U32 xFramerate);
+
+Return<android::hardware::media::omx::V1_0::Status> setAudioPortFormat(
+ sp<IOmxNode> omxNode, OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE eEncoding);
+
void allocatePortBuffers(sp<IOmxNode> omxNode,
android::Vector<BufferInfo>* buffArray,
OMX_U32 portIndex,
@@ -299,9 +324,16 @@
android::Vector<BufferInfo>* oBuffer, OMX_U32 kPortIndexInput,
OMX_U32 kPortIndexOutput, int64_t timeoutUs = DEFAULT_TIMEOUT);
+typedef void (*portreconfig)(sp<IOmxNode> omxNode, sp<CodecObserver> observer,
+ android::Vector<BufferInfo>* iBuffer,
+ android::Vector<BufferInfo>* oBuffer,
+ OMX_U32 kPortIndexInput, OMX_U32 kPortIndexOutput,
+ Message msg, PortMode oPortMode, void* args);
void testEOS(sp<IOmxNode> omxNode, sp<CodecObserver> observer,
android::Vector<BufferInfo>* iBuffer,
android::Vector<BufferInfo>* oBuffer, bool signalEOS,
- bool& eosFlag, PortMode* portMode = nullptr);
+ bool& eosFlag, PortMode* portMode = nullptr,
+ portreconfig fptr = nullptr, OMX_U32 kPortIndexInput = 0,
+ OMX_U32 kPortIndexOutput = 1, void* args = nullptr);
#endif // MEDIA_HIDL_TEST_COMMON_H
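
The new ALIGN_POWER_OF_TWO(value, n) macro rounds value up to the next multiple of 2^n by adding (2^n - 1) and then masking off the low n bits. A small self-contained check of that behaviour; the assertions are illustrative and not part of the patch.

    #include <cassert>

    #define ALIGN_POWER_OF_TWO(value, n) \
        (((value) + ((1 << (n)) - 1)) & ~((1 << (n)) - 1))

    int main() {
        assert(ALIGN_POWER_OF_TWO(123, 4) == 128);   // rounded up to a multiple of 16
        assert(ALIGN_POWER_OF_TWO(128, 4) == 128);   // already-aligned values unchanged
        assert(ALIGN_POWER_OF_TWO(1, 12) == 4096);   // page-style alignment
        return 0;
    }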
diff --git a/media/omx/1.0/vts/functional/component/VtsHalMediaOmxV1_0TargetComponentTest.cpp b/media/omx/1.0/vts/functional/component/VtsHalMediaOmxV1_0TargetComponentTest.cpp
index 357c11e..38860ed 100644
--- a/media/omx/1.0/vts/functional/component/VtsHalMediaOmxV1_0TargetComponentTest.cpp
+++ b/media/omx/1.0/vts/functional/component/VtsHalMediaOmxV1_0TargetComponentTest.cpp
@@ -225,113 +225,6 @@
return;
}
-// get/set video component port format
-Return<android::hardware::media::omx::V1_0::Status> setVideoPortFormat(
- sp<IOmxNode> omxNode, OMX_U32 portIndex,
- OMX_VIDEO_CODINGTYPE eCompressionFormat, OMX_COLOR_FORMATTYPE eColorFormat,
- OMX_U32 xFramerate) {
- OMX_U32 index = 0;
- OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
- std::vector<OMX_COLOR_FORMATTYPE> arrColorFormat;
- std::vector<OMX_VIDEO_CODINGTYPE> arrCompressionFormat;
- android::hardware::media::omx::V1_0::Status status;
-
- while (1) {
- portFormat.nIndex = index;
- status = getPortParam(omxNode, OMX_IndexParamVideoPortFormat, portIndex,
- &portFormat);
- if (status != ::android::hardware::media::omx::V1_0::Status::OK) break;
- if (eCompressionFormat == OMX_VIDEO_CodingUnused)
- arrColorFormat.push_back(portFormat.eColorFormat);
- else
- arrCompressionFormat.push_back(portFormat.eCompressionFormat);
- index++;
- if (index == 512) {
- // enumerated way too many formats, highly unusual for this to
- // happen.
- EXPECT_LE(index, 512U)
- << "Expecting OMX_ErrorNoMore but not received";
- break;
- }
- }
- if (!index) return status;
- if (eCompressionFormat == OMX_VIDEO_CodingUnused) {
- for (index = 0; index < arrColorFormat.size(); index++) {
- if (arrColorFormat[index] == eColorFormat) {
- portFormat.eColorFormat = arrColorFormat[index];
- break;
- }
- }
- if (index == arrColorFormat.size()) {
- ALOGE("setting default color format %x", (int)arrColorFormat[0]);
- portFormat.eColorFormat = arrColorFormat[0];
- }
- portFormat.eCompressionFormat = OMX_VIDEO_CodingUnused;
- } else {
- for (index = 0; index < arrCompressionFormat.size(); index++) {
- if (arrCompressionFormat[index] == eCompressionFormat) {
- portFormat.eCompressionFormat = arrCompressionFormat[index];
- break;
- }
- }
- if (index == arrCompressionFormat.size()) {
- ALOGE("setting default compression format %x",
- (int)arrCompressionFormat[0]);
- portFormat.eCompressionFormat = arrCompressionFormat[0];
- }
- portFormat.eColorFormat = OMX_COLOR_FormatUnused;
- }
- // In setParam call nIndex shall be ignored as per omx-il specification.
- // see how this holds up by corrupting nIndex
- portFormat.nIndex = RANDOM_INDEX;
- portFormat.xFramerate = xFramerate;
- status = setPortParam(omxNode, OMX_IndexParamVideoPortFormat, portIndex,
- &portFormat);
- return status;
-}
-
-// get/set audio component port format
-Return<android::hardware::media::omx::V1_0::Status> setAudioPortFormat(
- sp<IOmxNode> omxNode, OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE eEncoding) {
- OMX_U32 index = 0;
- OMX_AUDIO_PARAM_PORTFORMATTYPE portFormat;
- std::vector<OMX_AUDIO_CODINGTYPE> arrEncoding;
- android::hardware::media::omx::V1_0::Status status;
-
- while (1) {
- portFormat.nIndex = index;
- status = getPortParam(omxNode, OMX_IndexParamAudioPortFormat, portIndex,
- &portFormat);
- if (status != ::android::hardware::media::omx::V1_0::Status::OK) break;
- arrEncoding.push_back(portFormat.eEncoding);
- index++;
- if (index == 512) {
- // enumerated way too many formats, highly unusual for this to
- // happen.
- EXPECT_LE(index, 512U)
- << "Expecting OMX_ErrorNoMore but not received";
- break;
- }
- }
- if (!index) return status;
- for (index = 0; index < arrEncoding.size(); index++) {
- if (arrEncoding[index] == eEncoding) {
- portFormat.eEncoding = arrEncoding[index];
- break;
- }
- }
- if (index == arrEncoding.size()) {
- ALOGE("setting default Port format %x", (int)arrEncoding[0]);
- portFormat.eEncoding = arrEncoding[0];
- }
- // In setParam call nIndex shall be ignored as per omx-il specification.
- // see how this holds up by corrupting nIndex
- portFormat.nIndex = RANDOM_INDEX;
- status = setPortParam(omxNode, OMX_IndexParamAudioPortFormat, portIndex,
- &portFormat);
- return status;
-}
-
// test dispatch message API call
TEST_F(ComponentHidlTest, dispatchMsg) {
description("test dispatch message API call");
diff --git a/media/omx/1.0/vts/functional/video/VtsHalMediaOmxV1_0TargetVideoDecTest.cpp b/media/omx/1.0/vts/functional/video/VtsHalMediaOmxV1_0TargetVideoDecTest.cpp
index 2ff8ed3..6e2e739 100644
--- a/media/omx/1.0/vts/functional/video/VtsHalMediaOmxV1_0TargetVideoDecTest.cpp
+++ b/media/omx/1.0/vts/functional/video/VtsHalMediaOmxV1_0TargetVideoDecTest.cpp
@@ -27,6 +27,7 @@
#include <android/hidl/allocator/1.0/IAllocator.h>
#include <android/hidl/memory/1.0/IMapper.h>
#include <android/hidl/memory/1.0/IMemory.h>
+#include <cutils/atomic.h>
using ::android::hardware::graphics::common::V1_0::BufferUsage;
using ::android::hardware::graphics::common::V1_0::PixelFormat;
@@ -47,6 +48,7 @@
#include <VtsHalHidlTargetTestBase.h>
#include <getopt.h>
+#include <media/hardware/HardwareAPI.h>
#include <media_hidl_test_common.h>
#include <media_video_hidl_test_common.h>
#include <fstream>
@@ -399,7 +401,7 @@
void allocateGraphicBuffers(sp<IOmxNode> omxNode, OMX_U32 portIndex,
android::Vector<BufferInfo>* buffArray,
uint32_t nFrameWidth, uint32_t nFrameHeight,
- int32_t* nStride, uint32_t count) {
+ int32_t* nStride, int format, uint32_t count) {
android::hardware::media::omx::V1_0::Status status;
sp<android::hardware::graphics::allocator::V2_0::IAllocator> allocator =
android::hardware::graphics::allocator::V2_0::IAllocator::getService();
@@ -416,7 +418,7 @@
descriptorInfo.width = nFrameWidth;
descriptorInfo.height = nFrameHeight;
descriptorInfo.layerCount = 1;
- descriptorInfo.format = PixelFormat::RGBA_8888;
+ descriptorInfo.format = static_cast<PixelFormat>(format);
descriptorInfo.usage = static_cast<uint64_t>(BufferUsage::CPU_READ_OFTEN);
omxNode->getGraphicBufferUsage(
portIndex,
@@ -441,6 +443,9 @@
EXPECT_EQ(error, android::hardware::graphics::mapper::V2_0::Error::NONE);
EXPECT_EQ(buffArray->size(), count);
+
+ static volatile int32_t nextId = 0;
+ uint64_t id = static_cast<uint64_t>(getpid()) << 32;
allocator->allocate(
descriptor, count,
[&](android::hardware::graphics::mapper::V2_0::Error _s, uint32_t _n1,
@@ -464,7 +469,7 @@
buffArray->editItemAt(i).omxBuffer.attr.anwBuffer.layerCount =
descriptorInfo.layerCount;
buffArray->editItemAt(i).omxBuffer.attr.anwBuffer.id =
- (*buffArray)[i].id;
+ id | static_cast<uint32_t>(android_atomic_inc(&nextId));
}
});
}
@@ -474,13 +479,29 @@
android::Vector<BufferInfo>* iBuffer,
android::Vector<BufferInfo>* oBuffer,
OMX_U32 kPortIndexInput, OMX_U32 kPortIndexOutput,
- Message msg, PortMode oPortMode) {
+ Message msg, PortMode oPortMode, void* args) {
android::hardware::media::omx::V1_0::Status status;
+ (void)args;
if (msg.data.eventData.event == OMX_EventPortSettingsChanged) {
ASSERT_EQ(msg.data.eventData.data1, kPortIndexOutput);
if (msg.data.eventData.data2 == OMX_IndexParamPortDefinition ||
msg.data.eventData.data2 == 0) {
+ // Components can send various kinds of port settings changed events
+ // all at once. Before committing to a full port reconfiguration,
+            // defer any events waiting in the queue so they can be
+            // addressed at a later point.
+ android::List<Message> msgQueueDefer;
+ while (1) {
+ status = observer->dequeueMessage(&msg, DEFAULT_TIMEOUT,
+ iBuffer, oBuffer);
+ if (status !=
+ android::hardware::media::omx::V1_0::Status::TIMED_OUT) {
+ msgQueueDefer.push_back(msg);
+ continue;
+ } else
+ break;
+ }
status = omxNode->sendCommand(
toRawCommandType(OMX_CommandPortDisable), kPortIndexOutput);
ASSERT_EQ(status, android::hardware::media::omx::V1_0::Status::OK);
@@ -509,12 +530,15 @@
// set Port Params
uint32_t nFrameWidth, nFrameHeight, xFramerate;
- OMX_COLOR_FORMATTYPE eColorFormat =
- OMX_COLOR_FormatYUV420Planar;
getInputChannelInfo(omxNode, kPortIndexInput, &nFrameWidth,
&nFrameHeight, &xFramerate);
+ // get configured color format
+ OMX_PARAM_PORTDEFINITIONTYPE portDef;
+ status = getPortParam(omxNode, OMX_IndexParamPortDefinition,
+ kPortIndexOutput, &portDef);
setDefaultPortParam(omxNode, kPortIndexOutput,
- OMX_VIDEO_CodingUnused, eColorFormat,
+ OMX_VIDEO_CodingUnused,
+ portDef.format.video.eColorFormat,
nFrameWidth, nFrameHeight, 0, xFramerate);
// If you can disable a port, then you should be able to
@@ -546,6 +570,7 @@
portDef.format.video.nFrameWidth,
portDef.format.video.nFrameHeight,
&portDef.format.video.nStride,
+ portDef.format.video.eColorFormat,
portDef.nBufferCountActual);
}
status = observer->dequeueMessage(&msg, DEFAULT_TIMEOUT,
@@ -556,6 +581,16 @@
ASSERT_EQ(msg.data.eventData.data1, OMX_CommandPortEnable);
ASSERT_EQ(msg.data.eventData.data2, kPortIndexOutput);
+ // Push back deferred messages to the list
+ android::List<Message>::iterator it = msgQueueDefer.begin();
+ while (it != msgQueueDefer.end()) {
+ status = omxNode->dispatchMessage(*it);
+ ASSERT_EQ(
+ status,
+ ::android::hardware::media::omx::V1_0::Status::OK);
+ it++;
+ }
+
// dispatch output buffers
for (size_t i = 0; i < oBuffer->size(); i++) {
dispatchOutputBuffer(omxNode, oBuffer, i, oPortMode);
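
The two hunks above implement a defer/replay scheme: before disabling the output port, every message already waiting in the observer queue is drained into msgQueueDefer, and once the port is enabled again each deferred message is handed back through dispatchMessage so nothing is lost across the reconfiguration. The same pattern in isolation, with a hypothetical Message struct and std::function callbacks standing in for the HIDL types:

    #include <functional>
    #include <list>

    struct Message { int event; };

    // Drain everything currently queued, run the reconfiguration step, then
    // hand the deferred messages back in their original order.
    void reconfigureWithDeferredMessages(
            const std::function<bool(Message*)>& tryDequeue,
            const std::function<void()>& reconfigure,
            const std::function<void(const Message&)>& redispatch) {
        std::list<Message> deferred;
        Message msg;
        while (tryDequeue(&msg)) {
            deferred.push_back(msg);
        }
        reconfigure();
        for (const Message& m : deferred) {
            redispatch(m);
        }
    }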
@@ -575,9 +610,6 @@
std::cout << "[ ] Warning ! OMX_EventError/ "
"Decode Frame Call might be failed \n";
return;
- } else if (msg.data.eventData.event == OMX_EventBufferFlag) {
- // soft omx components donot send this, we will just ignore it
- // for now
} else {
// something unexpected happened
ASSERT_TRUE(false);
@@ -602,7 +634,7 @@
EXPECT_EQ(msg.type, Message::Type::EVENT);
portReconfiguration(omxNode, observer, iBuffer, oBuffer,
kPortIndexInput, kPortIndexOutput, msg,
- oPortMode);
+ oPortMode, nullptr);
}
// status == TIMED_OUT, it could be due to process time being large
// than DEFAULT_TIMEOUT or component needs output buffers to start
@@ -668,7 +700,7 @@
msg.type == Message::Type::EVENT) {
portReconfiguration(omxNode, observer, iBuffer, oBuffer,
kPortIndexInput, kPortIndexOutput, msg,
- oPortMode);
+ oPortMode, nullptr);
}
if (frameID == (int)Info->size() || frameID == (offset + range)) break;
@@ -709,6 +741,116 @@
}
}
+// DescribeColorFormatParams Copy Constructor (Borrowed from OMXUtils.cpp)
+android::DescribeColorFormatParams::DescribeColorFormatParams(
+ const android::DescribeColorFormat2Params& params) {
+ eColorFormat = params.eColorFormat;
+ nFrameWidth = params.nFrameWidth;
+ nFrameHeight = params.nFrameHeight;
+ nStride = params.nStride;
+ nSliceHeight = params.nSliceHeight;
+ bUsingNativeBuffers = params.bUsingNativeBuffers;
+};
+
+bool isColorFormatFlexibleYUV(sp<IOmxNode> omxNode,
+ OMX_COLOR_FORMATTYPE eColorFormat) {
+ android::hardware::media::omx::V1_0::Status status;
+ unsigned int index = OMX_IndexMax, index2 = OMX_IndexMax;
+ omxNode->getExtensionIndex(
+ "OMX.google.android.index.describeColorFormat",
+ [&index](android::hardware::media::omx::V1_0::Status _s,
+ unsigned int _nl) {
+ if (_s == ::android::hardware::media::omx::V1_0::Status::OK)
+ index = _nl;
+ });
+ omxNode->getExtensionIndex(
+ "OMX.google.android.index.describeColorFormat2",
+ [&index2](android::hardware::media::omx::V1_0::Status _s,
+ unsigned int _nl) {
+ if (_s == ::android::hardware::media::omx::V1_0::Status::OK)
+ index2 = _nl;
+ });
+
+ android::DescribeColorFormat2Params describeParams;
+ describeParams.eColorFormat = eColorFormat;
+ describeParams.nFrameWidth = 128;
+ describeParams.nFrameHeight = 128;
+ describeParams.nStride = 128;
+ describeParams.nSliceHeight = 128;
+ describeParams.bUsingNativeBuffers = OMX_FALSE;
+ if (index != OMX_IndexMax) {
+ android::DescribeColorFormatParams describeParamsV1(describeParams);
+ status = getParam(omxNode, static_cast<OMX_INDEXTYPE>(index),
+ &describeParamsV1);
+ if (status == ::android::hardware::media::omx::V1_0::Status::OK) {
+ android::MediaImage& img = describeParamsV1.sMediaImage;
+ if (img.mType == android::MediaImage::MEDIA_IMAGE_TYPE_YUV) {
+ if (img.mNumPlanes == 3 &&
+ img.mPlane[img.Y].mHorizSubsampling == 1 &&
+ img.mPlane[img.Y].mVertSubsampling == 1) {
+ if (img.mPlane[img.U].mHorizSubsampling == 2 &&
+ img.mPlane[img.U].mVertSubsampling == 2 &&
+ img.mPlane[img.V].mHorizSubsampling == 2 &&
+ img.mPlane[img.V].mVertSubsampling == 2) {
+ if (img.mBitDepth <= 8) {
+ return true;
+ }
+ }
+ }
+ }
+ }
+ } else if (index2 != OMX_IndexMax) {
+ status = getParam(omxNode, static_cast<OMX_INDEXTYPE>(index2),
+ &describeParams);
+ android::MediaImage2& img = describeParams.sMediaImage;
+ if (img.mType == android::MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
+ if (img.mNumPlanes == 3 &&
+ img.mPlane[img.Y].mHorizSubsampling == 1 &&
+ img.mPlane[img.Y].mVertSubsampling == 1) {
+ if (img.mPlane[img.U].mHorizSubsampling == 2 &&
+ img.mPlane[img.U].mVertSubsampling == 2 &&
+ img.mPlane[img.V].mHorizSubsampling == 2 &&
+ img.mPlane[img.V].mVertSubsampling == 2) {
+ if (img.mBitDepth <= 8) {
+ return true;
+ }
+ }
+ }
+ }
+ }
+ return false;
+}
+
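Note: isColorFormatFlexibleYUV above accepts a format only when the described image is a 3-plane 4:2:0 layout with at most 8 bits per component. The same criteria, restated on a plain struct (names here are illustrative, not the OMX MediaImage types):

    struct PlaneInfo { int horizSubsampling; int vertSubsampling; };
    struct ImageInfo { int numPlanes; int bitDepth; PlaneInfo y, u, v; };

    bool isFlexibleYuv420(const ImageInfo& img) {
        return img.numPlanes == 3 && img.bitDepth <= 8 &&
               img.y.horizSubsampling == 1 && img.y.vertSubsampling == 1 &&
               img.u.horizSubsampling == 2 && img.u.vertSubsampling == 2 &&
               img.v.horizSubsampling == 2 && img.v.vertSubsampling == 2;
    }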
+// Get the default color format for the output port
+void getDefaultColorFormat(sp<IOmxNode> omxNode, OMX_U32 kPortIndexOutput,
+ PortMode oPortMode,
+ OMX_COLOR_FORMATTYPE* eColorFormat) {
+ android::hardware::media::omx::V1_0::Status status;
+ OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
+ *eColorFormat = OMX_COLOR_FormatUnused;
+ portFormat.nIndex = 0;
+ while (1) {
+ status = getPortParam(omxNode, OMX_IndexParamVideoPortFormat,
+ kPortIndexOutput, &portFormat);
+ if (status != ::android::hardware::media::omx::V1_0::Status::OK) break;
+ EXPECT_EQ(portFormat.eCompressionFormat, OMX_VIDEO_CodingUnused);
+ if (oPortMode != PortMode::PRESET_BYTE_BUFFER) {
+ *eColorFormat = portFormat.eColorFormat;
+ break;
+ }
+ if (isColorFormatFlexibleYUV(omxNode, portFormat.eColorFormat)) {
+ *eColorFormat = portFormat.eColorFormat;
+ break;
+ }
+ if (OMX_COLOR_FormatYUV420SemiPlanar == portFormat.eColorFormat ||
+ OMX_COLOR_FormatYUV420Planar == portFormat.eColorFormat) {
+ *eColorFormat = portFormat.eColorFormat;
+ break;
+ }
+ portFormat.nIndex++;
+ }
+}
+
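Note: getDefaultColorFormat walks the formats the component enumerates and picks the first acceptable one: for graphic-buffer port modes any enumerated format is taken, while PRESET_BYTE_BUFFER requires a flexible YUV format or one of the known planar/semi-planar 4:2:0 formats. A compact sketch of that selection order (illustrative callables, not the getPortParam loop):

    #include <functional>
    #include <optional>
    #include <vector>

    enum class Mode { PresetByteBuffer, GraphicBuffer };

    std::optional<int> pickOutputColorFormat(
            const std::vector<int>& enumeratedFormats, Mode mode,
            const std::function<bool(int)>& isFlexibleYuv,
            const std::function<bool(int)>& isKnownYuv420) {
        for (int fmt : enumeratedFormats) {
            if (mode != Mode::PresetByteBuffer) return fmt;            // graphic buffers: first format wins
            if (isFlexibleYuv(fmt) || isKnownYuv420(fmt)) return fmt;  // byte buffers: CPU-readable YUV420 only
        }
        return std::nullopt;  // corresponds to leaving OMX_COLOR_FormatUnused
    }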
// set component role
TEST_F(VideoDecHidlTest, SetRole) {
description("Test Set Component Role");
@@ -770,7 +912,7 @@
eleInfo.open(info);
ASSERT_EQ(eleInfo.is_open(), true);
android::Vector<FrameData> Info;
- int bytesCount = 0;
+ int bytesCount = 0, maxBytesCount = 0;
uint32_t flags = 0;
uint32_t timestamp = 0;
timestampDevTest = true;
@@ -779,11 +921,17 @@
eleInfo >> flags;
eleInfo >> timestamp;
Info.push_back({bytesCount, flags, timestamp});
- if (flags != OMX_BUFFERFLAG_CODECCONFIG)
+ if (timestampDevTest && (flags != OMX_BUFFERFLAG_CODECCONFIG))
timestampUslist.push_back(timestamp);
+ if (maxBytesCount < bytesCount) maxBytesCount = bytesCount;
}
eleInfo.close();
+ // Frame sizes are known in advance; use the largest to size the input buffers
+ maxBytesCount = ALIGN_POWER_OF_TWO(maxBytesCount, 10);
+ status = setPortBufferSize(omxNode, kPortIndexInput, maxBytesCount);
+ ASSERT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
+
// set port mode
portMode[0] = PortMode::PRESET_BYTE_BUFFER;
portMode[1] = PortMode::DYNAMIC_ANW_BUFFER;
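Note: the buffer-size hunk above rounds the largest frame size up before passing it to setPortBufferSize. Assuming ALIGN_POWER_OF_TWO(v, n) rounds v up to the next multiple of 2^n (here 2^10 = 1024 bytes), a minimal stand-in is:

    #include <cstdint>

    constexpr uint32_t alignPowerOfTwo(uint32_t value, uint32_t shift) {
        const uint32_t mask = (1u << shift) - 1;
        return (value + mask) & ~mask;  // round up to the next multiple of 2^shift
    }

    static_assert(alignPowerOfTwo(1500, 10) == 2048, "rounds up to the next 1 KiB boundary");
    static_assert(alignPowerOfTwo(1024, 10) == 1024, "already-aligned values are unchanged");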
@@ -798,11 +946,21 @@
// set Port Params
uint32_t nFrameWidth, nFrameHeight, xFramerate;
- OMX_COLOR_FORMATTYPE eColorFormat = OMX_COLOR_FormatYUV420Planar;
getInputChannelInfo(omxNode, kPortIndexInput, &nFrameWidth, &nFrameHeight,
&xFramerate);
+ // get default color format
+ OMX_COLOR_FORMATTYPE eColorFormat = OMX_COLOR_FormatUnused;
+ getDefaultColorFormat(omxNode, kPortIndexOutput, portMode[1],
+ &eColorFormat);
+ ASSERT_NE(eColorFormat, OMX_COLOR_FormatUnused);
+ status =
+ setVideoPortFormat(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
+ eColorFormat, xFramerate);
+ EXPECT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
setDefaultPortParam(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
eColorFormat, nFrameWidth, nFrameHeight, 0, xFramerate);
+
+ // Disable adaptive playback.
omxNode->prepareForAdaptivePlayback(kPortIndexOutput, false, 1920, 1080);
android::Vector<BufferInfo> iBuffer, oBuffer;
@@ -822,7 +980,8 @@
allocateGraphicBuffers(
omxNode, kPortIndexOutput, &oBuffer,
portDef.format.video.nFrameWidth, portDef.format.video.nFrameHeight,
- &portDef.format.video.nStride, portDef.nBufferCountActual);
+ &portDef.format.video.nStride, portDef.format.video.eColorFormat,
+ portDef.nBufferCountActual);
}
// Port Reconfiguration
@@ -834,8 +993,9 @@
eleStream.close();
waitOnInputConsumption(omxNode, observer, &iBuffer, &oBuffer,
kPortIndexInput, kPortIndexOutput, portMode[1]);
- testEOS(omxNode, observer, &iBuffer, &oBuffer, false, eosFlag, portMode);
- EXPECT_EQ(timestampUslist.empty(), true);
+ testEOS(omxNode, observer, &iBuffer, &oBuffer, false, eosFlag, portMode,
+ portReconfiguration, kPortIndexInput, kPortIndexOutput, nullptr);
+ if (timestampDevTest) EXPECT_EQ(timestampUslist.empty(), true);
// set state to idle
changeStateExecutetoIdle(omxNode, observer, &iBuffer, &oBuffer);
// set state to executing
@@ -859,22 +1019,28 @@
kPortIndexOutput = kPortIndexInput + 1;
}
- // set Port Params
- uint32_t nFrameWidth, nFrameHeight, xFramerate;
- OMX_COLOR_FORMATTYPE eColorFormat = OMX_COLOR_FormatYUV420Planar;
- getInputChannelInfo(omxNode, kPortIndexInput, &nFrameWidth, &nFrameHeight,
- &xFramerate);
- setDefaultPortParam(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
- eColorFormat, nFrameWidth, nFrameHeight, 0, xFramerate);
-
// set port mode
- PortMode portMode[2];
- portMode[0] = portMode[1] = PortMode::PRESET_BYTE_BUFFER;
status = omxNode->setPortMode(kPortIndexInput, portMode[0]);
ASSERT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
status = omxNode->setPortMode(kPortIndexOutput, portMode[1]);
ASSERT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
+ // set Port Params
+ uint32_t nFrameWidth, nFrameHeight, xFramerate;
+ getInputChannelInfo(omxNode, kPortIndexInput, &nFrameWidth, &nFrameHeight,
+ &xFramerate);
+ // get default color format
+ OMX_COLOR_FORMATTYPE eColorFormat = OMX_COLOR_FormatUnused;
+ getDefaultColorFormat(omxNode, kPortIndexOutput, portMode[1],
+ &eColorFormat);
+ ASSERT_NE(eColorFormat, OMX_COLOR_FormatUnused);
+ status =
+ setVideoPortFormat(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
+ eColorFormat, xFramerate);
+ EXPECT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
+ setDefaultPortParam(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
+ eColorFormat, nFrameWidth, nFrameHeight, 0, xFramerate);
+
android::Vector<BufferInfo> iBuffer, oBuffer;
// set state to idle
@@ -884,7 +1050,8 @@
changeStateIdletoExecute(omxNode, observer);
// request EOS at the start
- testEOS(omxNode, observer, &iBuffer, &oBuffer, true, eosFlag, portMode);
+ testEOS(omxNode, observer, &iBuffer, &oBuffer, true, eosFlag, portMode,
+ portReconfiguration, kPortIndexInput, kPortIndexOutput, nullptr);
flushPorts(omxNode, observer, &iBuffer, &oBuffer, kPortIndexInput,
kPortIndexOutput);
EXPECT_GE(framesReceived, 0U);
@@ -923,7 +1090,7 @@
eleInfo.open(info);
ASSERT_EQ(eleInfo.is_open(), true);
android::Vector<FrameData> Info;
- int bytesCount = 0;
+ int bytesCount = 0, maxBytesCount = 0;
uint32_t flags = 0;
uint32_t timestamp = 0;
while (1) {
@@ -931,25 +1098,37 @@
eleInfo >> flags;
eleInfo >> timestamp;
Info.push_back({bytesCount, flags, timestamp});
+ if (maxBytesCount < bytesCount) maxBytesCount = bytesCount;
}
eleInfo.close();
- // set Port Params
- uint32_t nFrameWidth, nFrameHeight, xFramerate;
- OMX_COLOR_FORMATTYPE eColorFormat = OMX_COLOR_FormatYUV420Planar;
- getInputChannelInfo(omxNode, kPortIndexInput, &nFrameWidth, &nFrameHeight,
- &xFramerate);
- setDefaultPortParam(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
- eColorFormat, nFrameWidth, nFrameHeight, 0, xFramerate);
+ // Frame sizes are known in advance; use the largest to size the input buffers
+ maxBytesCount = ALIGN_POWER_OF_TWO(maxBytesCount, 10);
+ status = setPortBufferSize(omxNode, kPortIndexInput, maxBytesCount);
+ ASSERT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
// set port mode
- PortMode portMode[2];
- portMode[0] = portMode[1] = PortMode::PRESET_BYTE_BUFFER;
status = omxNode->setPortMode(kPortIndexInput, portMode[0]);
ASSERT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
status = omxNode->setPortMode(kPortIndexOutput, portMode[1]);
ASSERT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
+ // set Port Params
+ uint32_t nFrameWidth, nFrameHeight, xFramerate;
+ getInputChannelInfo(omxNode, kPortIndexInput, &nFrameWidth, &nFrameHeight,
+ &xFramerate);
+ // get default color format
+ OMX_COLOR_FORMATTYPE eColorFormat = OMX_COLOR_FormatUnused;
+ getDefaultColorFormat(omxNode, kPortIndexOutput, portMode[1],
+ &eColorFormat);
+ ASSERT_NE(eColorFormat, OMX_COLOR_FormatUnused);
+ status =
+ setVideoPortFormat(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
+ eColorFormat, xFramerate);
+ EXPECT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
+ setDefaultPortParam(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
+ eColorFormat, nFrameWidth, nFrameHeight, 0, xFramerate);
+
android::Vector<BufferInfo> iBuffer, oBuffer;
// set state to idle
@@ -968,7 +1147,8 @@
eleStream.close();
waitOnInputConsumption(omxNode, observer, &iBuffer, &oBuffer,
kPortIndexInput, kPortIndexOutput, portMode[1]);
- testEOS(omxNode, observer, &iBuffer, &oBuffer, false, eosFlag, portMode);
+ testEOS(omxNode, observer, &iBuffer, &oBuffer, false, eosFlag, portMode,
+ portReconfiguration, kPortIndexInput, kPortIndexOutput, nullptr);
flushPorts(omxNode, observer, &iBuffer, &oBuffer, kPortIndexInput,
kPortIndexOutput);
EXPECT_GE(framesReceived, 1U);
@@ -983,7 +1163,8 @@
eleStream.close();
waitOnInputConsumption(omxNode, observer, &iBuffer, &oBuffer,
kPortIndexInput, kPortIndexOutput, portMode[1]);
- testEOS(omxNode, observer, &iBuffer, &oBuffer, true, eosFlag, portMode);
+ testEOS(omxNode, observer, &iBuffer, &oBuffer, true, eosFlag, portMode,
+ portReconfiguration, kPortIndexInput, kPortIndexOutput, nullptr);
flushPorts(omxNode, observer, &iBuffer, &oBuffer, kPortIndexInput,
kPortIndexOutput);
EXPECT_GE(framesReceived, 1U);
@@ -1022,7 +1203,7 @@
eleInfo.open(info);
ASSERT_EQ(eleInfo.is_open(), true);
android::Vector<FrameData> Info;
- int bytesCount = 0;
+ int bytesCount = 0, maxBytesCount = 0;
uint32_t flags = 0;
uint32_t timestamp = 0;
while (1) {
@@ -1030,25 +1211,37 @@
eleInfo >> flags;
eleInfo >> timestamp;
Info.push_back({bytesCount, flags, timestamp});
+ if (maxBytesCount < bytesCount) maxBytesCount = bytesCount;
}
eleInfo.close();
- // set Port Params
- uint32_t nFrameWidth, nFrameHeight, xFramerate;
- OMX_COLOR_FORMATTYPE eColorFormat = OMX_COLOR_FormatYUV420Planar;
- getInputChannelInfo(omxNode, kPortIndexInput, &nFrameWidth, &nFrameHeight,
- &xFramerate);
- setDefaultPortParam(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
- eColorFormat, nFrameWidth, nFrameHeight, 0, xFramerate);
+ // Frame sizes are known in advance; use the largest to size the input buffers
+ maxBytesCount = ALIGN_POWER_OF_TWO(maxBytesCount, 10);
+ status = setPortBufferSize(omxNode, kPortIndexInput, maxBytesCount);
+ ASSERT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
// set port mode
- PortMode portMode[2];
- portMode[0] = portMode[1] = PortMode::PRESET_BYTE_BUFFER;
status = omxNode->setPortMode(kPortIndexInput, portMode[0]);
ASSERT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
status = omxNode->setPortMode(kPortIndexOutput, portMode[1]);
ASSERT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
+ // set Port Params
+ uint32_t nFrameWidth, nFrameHeight, xFramerate;
+ getInputChannelInfo(omxNode, kPortIndexInput, &nFrameWidth, &nFrameHeight,
+ &xFramerate);
+ // get default color format
+ OMX_COLOR_FORMATTYPE eColorFormat = OMX_COLOR_FormatUnused;
+ getDefaultColorFormat(omxNode, kPortIndexOutput, portMode[1],
+ &eColorFormat);
+ ASSERT_NE(eColorFormat, OMX_COLOR_FormatUnused);
+ status =
+ setVideoPortFormat(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
+ eColorFormat, xFramerate);
+ EXPECT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
+ setDefaultPortParam(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
+ eColorFormat, nFrameWidth, nFrameHeight, 0, xFramerate);
+
android::Vector<BufferInfo> iBuffer, oBuffer;
// set state to idle
@@ -1066,7 +1259,8 @@
eleStream.close();
waitOnInputConsumption(omxNode, observer, &iBuffer, &oBuffer,
kPortIndexInput, kPortIndexOutput, portMode[1]);
- testEOS(omxNode, observer, &iBuffer, &oBuffer, true, eosFlag, portMode);
+ testEOS(omxNode, observer, &iBuffer, &oBuffer, true, eosFlag, portMode,
+ portReconfiguration, kPortIndexInput, kPortIndexOutput, nullptr);
flushPorts(omxNode, observer, &iBuffer, &oBuffer, kPortIndexInput,
kPortIndexOutput);
framesReceived = 0;
@@ -1104,7 +1298,7 @@
eleInfo.open(info);
ASSERT_EQ(eleInfo.is_open(), true);
android::Vector<FrameData> Info;
- int bytesCount = 0;
+ int bytesCount = 0, maxBytesCount = 0;
uint32_t flags = 0;
uint32_t timestamp = 0;
while (1) {
@@ -1112,25 +1306,37 @@
eleInfo >> flags;
eleInfo >> timestamp;
Info.push_back({bytesCount, flags, timestamp});
+ if (maxBytesCount < bytesCount) maxBytesCount = bytesCount;
}
eleInfo.close();
- // set Port Params
- uint32_t nFrameWidth, nFrameHeight, xFramerate;
- OMX_COLOR_FORMATTYPE eColorFormat = OMX_COLOR_FormatYUV420Planar;
- getInputChannelInfo(omxNode, kPortIndexInput, &nFrameWidth, &nFrameHeight,
- &xFramerate);
- setDefaultPortParam(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
- eColorFormat, nFrameWidth, nFrameHeight, 0, xFramerate);
+ // Frame sizes are known in advance; use the largest to size the input buffers
+ maxBytesCount = ALIGN_POWER_OF_TWO(maxBytesCount, 10);
+ status = setPortBufferSize(omxNode, kPortIndexInput, maxBytesCount);
+ ASSERT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
// set port mode
- PortMode portMode[2];
- portMode[0] = portMode[1] = PortMode::PRESET_BYTE_BUFFER;
status = omxNode->setPortMode(kPortIndexInput, portMode[0]);
ASSERT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
status = omxNode->setPortMode(kPortIndexOutput, portMode[1]);
ASSERT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
+ // set Port Params
+ uint32_t nFrameWidth, nFrameHeight, xFramerate;
+ getInputChannelInfo(omxNode, kPortIndexInput, &nFrameWidth, &nFrameHeight,
+ &xFramerate);
+ // get default color format
+ OMX_COLOR_FORMATTYPE eColorFormat = OMX_COLOR_FormatUnused;
+ getDefaultColorFormat(omxNode, kPortIndexOutput, portMode[1],
+ &eColorFormat);
+ ASSERT_NE(eColorFormat, OMX_COLOR_FormatUnused);
+ status =
+ setVideoPortFormat(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
+ eColorFormat, xFramerate);
+ EXPECT_EQ(status, ::android::hardware::media::omx::V1_0::Status::OK);
+ setDefaultPortParam(omxNode, kPortIndexOutput, OMX_VIDEO_CodingUnused,
+ eColorFormat, nFrameWidth, nFrameHeight, 0, xFramerate);
+
android::Vector<BufferInfo> iBuffer, oBuffer;
// set state to idle
@@ -1172,6 +1378,7 @@
Info.size() - index, portMode[1], false);
}
// Note: Assumes 200 ms is enough to end any decode call that started
+ eleStream.close();
flushPorts(omxNode, observer, &iBuffer, &oBuffer, kPortIndexInput,
kPortIndexOutput, 200000);
framesReceived = 0;
diff --git a/media/omx/1.0/vts/functional/video/VtsHalMediaOmxV1_0TargetVideoEncTest.cpp b/media/omx/1.0/vts/functional/video/VtsHalMediaOmxV1_0TargetVideoEncTest.cpp
index cd6eaf5..bbe0843 100644
--- a/media/omx/1.0/vts/functional/video/VtsHalMediaOmxV1_0TargetVideoEncTest.cpp
+++ b/media/omx/1.0/vts/functional/video/VtsHalMediaOmxV1_0TargetVideoEncTest.cpp
@@ -1286,7 +1286,7 @@
eleStream.close();
waitOnInputConsumption(omxNode, observer, &iBuffer, &oBuffer);
testEOS(omxNode, observer, &iBuffer, &oBuffer, false, eosFlag);
- EXPECT_EQ(timestampUslist.empty(), true);
+ if (timestampDevTest) EXPECT_EQ(timestampUslist.empty(), true);
// set state to idle
changeStateExecutetoIdle(omxNode, observer, &iBuffer, &oBuffer);
diff --git a/media/omx/1.0/vts/functional/video/media_video_hidl_test_common.cpp b/media/omx/1.0/vts/functional/video/media_video_hidl_test_common.cpp
index 77763d1..91aecf2 100644
--- a/media/omx/1.0/vts/functional/video/media_video_hidl_test_common.cpp
+++ b/media/omx/1.0/vts/functional/video/media_video_hidl_test_common.cpp
@@ -52,68 +52,6 @@
#include <media_video_hidl_test_common.h>
#include <memory>
-Return<android::hardware::media::omx::V1_0::Status> setVideoPortFormat(
- sp<IOmxNode> omxNode, OMX_U32 portIndex,
- OMX_VIDEO_CODINGTYPE eCompressionFormat, OMX_COLOR_FORMATTYPE eColorFormat,
- OMX_U32 xFramerate) {
- OMX_U32 index = 0;
- OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
- std::vector<OMX_COLOR_FORMATTYPE> arrColorFormat;
- std::vector<OMX_VIDEO_CODINGTYPE> arrCompressionFormat;
- android::hardware::media::omx::V1_0::Status status;
-
- while (1) {
- portFormat.nIndex = index;
- status = getPortParam(omxNode, OMX_IndexParamVideoPortFormat, portIndex,
- &portFormat);
- if (status != ::android::hardware::media::omx::V1_0::Status::OK) break;
- if (eCompressionFormat == OMX_VIDEO_CodingUnused)
- arrColorFormat.push_back(portFormat.eColorFormat);
- else
- arrCompressionFormat.push_back(portFormat.eCompressionFormat);
- index++;
- if (index == 512) {
- // enumerated way too many formats, highly unusual for this to
- // happen.
- EXPECT_LE(index, 512U)
- << "Expecting OMX_ErrorNoMore but not received";
- break;
- }
- }
- if (!index) return status;
- if (eCompressionFormat == OMX_VIDEO_CodingUnused) {
- for (index = 0; index < arrColorFormat.size(); index++) {
- if (arrColorFormat[index] == eColorFormat) {
- portFormat.eColorFormat = arrColorFormat[index];
- break;
- }
- }
- if (index == arrColorFormat.size()) {
- ALOGE("setting default color format %x", (int)arrColorFormat[0]);
- portFormat.eColorFormat = arrColorFormat[0];
- }
- portFormat.eCompressionFormat = OMX_VIDEO_CodingUnused;
- } else {
- for (index = 0; index < arrCompressionFormat.size(); index++) {
- if (arrCompressionFormat[index] == eCompressionFormat) {
- portFormat.eCompressionFormat = arrCompressionFormat[index];
- break;
- }
- }
- if (index == arrCompressionFormat.size()) {
- ALOGE("setting default compression format %x",
- (int)arrCompressionFormat[0]);
- portFormat.eCompressionFormat = arrCompressionFormat[0];
- }
- portFormat.eColorFormat = OMX_COLOR_FormatUnused;
- }
- portFormat.nIndex = 0;
- portFormat.xFramerate = xFramerate;
- status = setPortParam(omxNode, OMX_IndexParamVideoPortFormat, portIndex,
- &portFormat);
- return status;
-}
-
void enumerateProfileAndLevel(sp<IOmxNode> omxNode, OMX_U32 portIndex,
std::vector<int32_t>* arrProfile,
std::vector<int32_t>* arrLevel) {
diff --git a/media/omx/1.0/vts/functional/video/media_video_hidl_test_common.h b/media/omx/1.0/vts/functional/video/media_video_hidl_test_common.h
index e492779..c1d7aea 100644
--- a/media/omx/1.0/vts/functional/video/media_video_hidl_test_common.h
+++ b/media/omx/1.0/vts/functional/video/media_video_hidl_test_common.h
@@ -26,11 +26,6 @@
* Common video utils
*/
-Return<android::hardware::media::omx::V1_0::Status> setVideoPortFormat(
- sp<IOmxNode> omxNode, OMX_U32 portIndex,
- OMX_VIDEO_CODINGTYPE eCompressionFormat, OMX_COLOR_FORMATTYPE eColorFormat,
- OMX_U32 xFramerate);
-
void enumerateProfileAndLevel(sp<IOmxNode> omxNode, OMX_U32 portIndex,
std::vector<int32_t>* arrProfile,
std::vector<int32_t>* arrLevel);
diff --git a/nfc/1.0/vts/functional/VtsHalNfcV1_0TargetTest.cpp b/nfc/1.0/vts/functional/VtsHalNfcV1_0TargetTest.cpp
index e67f94f..2f00fbb 100644
--- a/nfc/1.0/vts/functional/VtsHalNfcV1_0TargetTest.cpp
+++ b/nfc/1.0/vts/functional/VtsHalNfcV1_0TargetTest.cpp
@@ -44,6 +44,8 @@
{ 0x20, 0x04, 0x02, 0x01, 0x00 }
#define CORE_INIT_CMD \
{ 0x20, 0x01, 0x00 }
+#define CORE_INIT_CMD_NCI20 \
+ { 0x20, 0x01, 0x02, 0x00, 0x00 }
#define INVALID_COMMAND \
{ 0x20, 0x00, 0x00 }
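Note: the new CORE_INIT_CMD_NCI20 macro carries two extra parameter octets that NCI 2.0 adds to CORE_INIT_CMD. The version-dependent choice made later in the test can be sketched as follows (byte values copied from the macros above):

    #include <cstdint>
    #include <vector>

    std::vector<uint8_t> buildCoreInitCmd(bool isNci20) {
        if (isNci20) return {0x20, 0x01, 0x02, 0x00, 0x00};  // CORE_INIT_CMD_NCI20
        return {0x20, 0x01, 0x00};                           // CORE_INIT_CMD
    }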
@@ -290,16 +292,23 @@
// Wait for CORE_RESET_NTF
res = nfc_cb_->WaitForCallback(kCallbackNameSendData);
EXPECT_TRUE(res.no_timeout);
+ cmd = CORE_INIT_CMD_NCI20;
+ } else {
+ cmd = CORE_INIT_CMD;
}
-
- cmd = CORE_INIT_CMD;
data = cmd;
+
EXPECT_EQ(data.size(), nfc_->write(data));
// Wait for CORE_INIT_RSP
res = nfc_cb_->WaitForCallback(kCallbackNameSendData);
EXPECT_TRUE(res.no_timeout);
EXPECT_EQ((int)NfcStatus::OK, res.args->last_data_[3]);
-
+ if (nci_version == NCI_VERSION_2 && res.args->last_data_.size() > 13 &&
+ res.args->last_data_[13] == 0x00) {
+ // Wait for CORE_CONN_CREDITS_NTF
+ res = nfc_cb_->WaitForCallback(kCallbackNameSendData);
+ EXPECT_TRUE(res.no_timeout);
+ }
// Send an Error Data Packet
cmd = INVALID_COMMAND;
data = cmd;
@@ -347,15 +356,23 @@
// Wait for CORE_RESET_NTF
res = nfc_cb_->WaitForCallback(kCallbackNameSendData);
EXPECT_TRUE(res.no_timeout);
+ cmd = CORE_INIT_CMD_NCI20;
+ } else {
+ cmd = CORE_INIT_CMD;
}
-
- cmd = CORE_INIT_CMD;
data = cmd;
+
EXPECT_EQ(data.size(), nfc_->write(data));
// Wait for CORE_INIT_RSP
res = nfc_cb_->WaitForCallback(kCallbackNameSendData);
EXPECT_TRUE(res.no_timeout);
EXPECT_EQ((int)NfcStatus::OK, res.args->last_data_[3]);
+ if (nci_version == NCI_VERSION_2 && res.args->last_data_.size() > 13 &&
+ res.args->last_data_[13] == 0x00) {
+ // Wait for CORE_CONN_CREDITS_NTF
+ res = nfc_cb_->WaitForCallback(kCallbackNameSendData);
+ EXPECT_TRUE(res.no_timeout);
+ }
cmd = CORE_CONN_CREATE_CMD;
data = cmd;
diff --git a/radio/1.0/IRadioResponse.hal b/radio/1.0/IRadioResponse.hal
index 8697e57..45cdd1c 100644
--- a/radio/1.0/IRadioResponse.hal
+++ b/radio/1.0/IRadioResponse.hal
@@ -1186,6 +1186,7 @@
* RadioError:REQUEST_NOT_SUPPORTED
* RadioError:NO_RESOURCES
* RadioError:CANCELLED
+ * RadioError:SIM_ABSENT
*/
oneway setSuppServiceNotificationsResponse(RadioResponseInfo info);
@@ -1289,6 +1290,7 @@
* RadioError:INVALID_ARGUMENTS
* RadioError:MODEM_ERR
* RadioError:REQUEST_NOT_SUPPORTED
+ * RadioError:SIM_ABSENT
*/
oneway sendEnvelopeResponse(RadioResponseInfo info, string commandResponse);
@@ -1306,6 +1308,7 @@
* RadioError:CANCELLED
* RadioError:INVALID_MODEM_STATE
* RadioError:REQUEST_NOT_SUPPORTED
+ * RadioError:SIM_ABSENT
*/
oneway sendTerminalResponseToSimResponse(RadioResponseInfo info);
@@ -1323,6 +1326,7 @@
* RadioError:NO_RESOURCES
* RadioError:CANCELLED
* RadioError:REQUEST_NOT_SUPPORTED
+ * RadioError:SIM_ABSENT
*/
oneway handleStkCallSetupRequestFromSimResponse(RadioResponseInfo info);
@@ -1419,6 +1423,7 @@
* RadioError:REQUEST_NOT_SUPPORTED
* RadioError:NO_RESOURCES
* RadioError:CANCELLED
+ * RadioError:SIM_ABSENT
*/
oneway setLocationUpdatesResponse(RadioResponseInfo info);
@@ -1453,6 +1458,7 @@
* RadioError:OPERATION_NOT_ALLOWED
* RadioError:NO_RESOURCES
* RadioError:CANCELLED
+ * RadioError:SIM_ABSENT
*/
oneway setCdmaRoamingPreferenceResponse(RadioResponseInfo info);
@@ -1471,6 +1477,7 @@
* RadioError:REQUEST_NOT_SUPPORTED
* RadioError:NO_RESOURCES
* RadioError:CANCELLED
+ * RadioError:SIM_ABSENT
*/
oneway getCdmaRoamingPreferenceResponse(RadioResponseInfo info, CdmaRoamingType type);
@@ -1795,6 +1802,7 @@
* RadioError:REQUEST_NOT_SUPPORTED
* RadioError:NO_RESOURCES
* RadioError:CANCELLED
+ * RadioError:SIM_ABSENT
*/
oneway getCDMASubscriptionResponse(RadioResponseInfo info, string mdn, string hSid,
string hNid, string min, string prl);
@@ -1888,6 +1896,7 @@
* RadioError:REQUEST_NOT_SUPPORTED
* RadioError:NO_RESOURCES
* RadioError:CANCELLED
+ * RadioError:SIM_ABSENT
*/
oneway exitEmergencyCallbackModeResponse(RadioResponseInfo info);
@@ -1980,6 +1989,7 @@
* RadioError:NO_RESOURCES
* RadioError:CANCELLED
* RadioError:REQUEST_NOT_SUPPORTED
+ * RadioError:SIM_ABSENT
*/
oneway getCdmaSubscriptionSourceResponse(RadioResponseInfo info, CdmaSubscriptionSource source);
@@ -1997,6 +2007,7 @@
* RadioError:INVALID_MODEM_STATE
* RadioError:INVALID_ARGUMENTS
* RadioError:REQUEST_NOT_SUPPORTED
+ * RadioError:SIM_ABSENT
*/
oneway requestIsimAuthenticationResponse(RadioResponseInfo info, string response);
diff --git a/radio/1.0/vts/functional/radio_hidl_hal_cell_broadcast.cpp b/radio/1.0/vts/functional/radio_hidl_hal_cell_broadcast.cpp
index 14d14d4..54855fb 100644
--- a/radio/1.0/vts/functional/radio_hidl_hal_cell_broadcast.cpp
+++ b/radio/1.0/vts/functional/radio_hidl_hal_cell_broadcast.cpp
@@ -122,7 +122,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(CheckGeneralError() ||
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ CheckGeneralError() ||
radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS);
}
}
@@ -140,7 +141,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(CheckGeneralError());
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ CheckGeneralError());
}
}
@@ -158,7 +160,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(CheckGeneralError() ||
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ CheckGeneralError() ||
radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS);
}
}
diff --git a/radio/1.0/vts/functional/radio_hidl_hal_data.cpp b/radio/1.0/vts/functional/radio_hidl_hal_data.cpp
index 108676b..1e0cff4 100644
--- a/radio/1.0/vts/functional/radio_hidl_hal_data.cpp
+++ b/radio/1.0/vts/functional/radio_hidl_hal_data.cpp
@@ -76,9 +76,13 @@
if (cardStatus.cardState == CardState::ABSENT) {
ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
- radioRsp->rspInfo.error == RadioError::RADIO_NOT_AVAILABLE ||
- radioRsp->rspInfo.error == RadioError::OP_NOT_ALLOWED_BEFORE_REG_TO_NW ||
- radioRsp->rspInfo.error == RadioError::OP_NOT_ALLOWED_DURING_VOICE_CALL ||
+ radioRsp->rspInfo.error ==
+ RadioError::RADIO_NOT_AVAILABLE ||
+ radioRsp->rspInfo.error ==
+ RadioError::OP_NOT_ALLOWED_BEFORE_REG_TO_NW ||
+ radioRsp->rspInfo.error ==
+ RadioError::OP_NOT_ALLOWED_DURING_VOICE_CALL ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT ||
CheckOEMError());
}
}
@@ -98,7 +102,10 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- EXPECT_EQ(RadioError::INVALID_CALL_ID, radioRsp->rspInfo.error);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::INVALID_CALL_ID ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT || CheckOEMError() ||
+ radioRsp->rspInfo.error == RadioError::RADIO_NOT_AVAILABLE);
}
}
@@ -116,7 +123,9 @@
if (cardStatus.cardState == CardState::ABSENT) {
ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
- radioRsp->rspInfo.error == RadioError::RADIO_NOT_AVAILABLE);
+ radioRsp->rspInfo.error ==
+ RadioError::RADIO_NOT_AVAILABLE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -220,6 +229,9 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- // TODO(shuoq): Will add error check when we know the expected error from QC
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::RADIO_NOT_AVAILABLE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED);
}
}
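Note: the radio VTS changes in this and the following files all widen "error is one of an allowed set" assertions. A small hypothetical helper (not part of the existing VTS utilities; RadioError here stands in for the generated HIDL enum) expresses the same intent more compactly:

    #include <initializer_list>

    enum class RadioError { NONE, RADIO_NOT_AVAILABLE, SIM_ABSENT, REQUEST_NOT_SUPPORTED, INVALID_CALL_ID };

    bool isOneOf(RadioError error, std::initializer_list<RadioError> allowed) {
        for (RadioError candidate : allowed)
            if (candidate == error) return true;
        return false;
    }

    // Usage inside a test body would then read:
    //   ASSERT_TRUE(isOneOf(radioRsp->rspInfo.error,
    //                       {RadioError::NONE, RadioError::SIM_ABSENT, RadioError::RADIO_NOT_AVAILABLE}));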
diff --git a/radio/1.0/vts/functional/radio_hidl_hal_icc.cpp b/radio/1.0/vts/functional/radio_hidl_hal_icc.cpp
index a8857c3..f50ee29 100644
--- a/radio/1.0/vts/functional/radio_hidl_hal_icc.cpp
+++ b/radio/1.0/vts/functional/radio_hidl_hal_icc.cpp
@@ -317,6 +317,7 @@
radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS ||
radioRsp->rspInfo.error == RadioError::INVALID_SIM_STATE ||
radioRsp->rspInfo.error == RadioError::PASSWORD_INCORRECT ||
- radioRsp->rspInfo.error == RadioError::INTERNAL_ERR);
+ radioRsp->rspInfo.error == RadioError::INTERNAL_ERR ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
diff --git a/radio/1.0/vts/functional/radio_hidl_hal_ims.cpp b/radio/1.0/vts/functional/radio_hidl_hal_ims.cpp
index 16465c7..e8a61b4 100644
--- a/radio/1.0/vts/functional/radio_hidl_hal_ims.cpp
+++ b/radio/1.0/vts/functional/radio_hidl_hal_ims.cpp
@@ -155,7 +155,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- EXPECT_EQ(RadioError::NONE, radioRsp->rspInfo.error);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -173,7 +174,7 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(CheckGeneralError());
+ ASSERT_TRUE(CheckGeneralError() || radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
diff --git a/radio/1.0/vts/functional/radio_hidl_hal_misc.cpp b/radio/1.0/vts/functional/radio_hidl_hal_misc.cpp
index 9e4f668..6beb580 100644
--- a/radio/1.0/vts/functional/radio_hidl_hal_misc.cpp
+++ b/radio/1.0/vts/functional/radio_hidl_hal_misc.cpp
@@ -70,7 +70,7 @@
TEST_F(RadioHidlTest, setRadioPower) {
int serial = GetRandomSerialNumber();
- radio->setRadioPower(serial, 0);
+ radio->setRadioPower(serial, 1);
EXPECT_EQ(std::cv_status::no_timeout, wait());
EXPECT_EQ(RadioResponseType::SOLICITED, radioRsp->rspInfo.type);
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
@@ -248,7 +248,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -264,7 +265,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -280,7 +282,9 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -297,7 +301,8 @@
if (cardStatus.cardState == CardState::ABSENT) {
ASSERT_TRUE(CheckGeneralError() || radioRsp->rspInfo.error == RadioError::NONE ||
- radioRsp->rspInfo.error == RadioError::MODEM_ERR);
+ radioRsp->rspInfo.error == RadioError::MODEM_ERR ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -345,7 +350,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED);
}
}
@@ -361,7 +367,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED);
}
}
@@ -377,7 +384,9 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -393,7 +402,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::EMPTY_RECORD);
}
}
@@ -409,7 +419,9 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -425,7 +437,9 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -494,7 +508,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED);
}
}
@@ -562,7 +577,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED);
}
}
@@ -671,9 +687,11 @@
if (cardStatus.cardState == CardState::ABSENT) {
std::cout << static_cast<int>(radioRsp->rspInfo.error) << std::endl;
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::RADIO_NOT_AVAILABLE ||
+ ASSERT_TRUE(radioRsp->rspInfo.error ==
+ RadioError::RADIO_NOT_AVAILABLE ||
radioRsp->rspInfo.error == RadioError::LCE_NOT_SUPPORTED ||
- radioRsp->rspInfo.error == RadioError::INTERNAL_ERR);
+ radioRsp->rspInfo.error == RadioError::INTERNAL_ERR ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -690,7 +708,10 @@
if (cardStatus.cardState == CardState::ABSENT) {
ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
- radioRsp->rspInfo.error == RadioError::LCE_NOT_SUPPORTED);
+ radioRsp->rspInfo.error == RadioError::LCE_NOT_SUPPORTED ||
+ radioRsp->rspInfo.error ==
+ RadioError::REQUEST_NOT_SUPPORTED ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -706,8 +727,11 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::RADIO_NOT_AVAILABLE || CheckOEMError() ||
- radioRsp->rspInfo.error == RadioError::INTERNAL_ERR);
+ ASSERT_TRUE(radioRsp->rspInfo.error ==
+ RadioError::RADIO_NOT_AVAILABLE ||
+ CheckOEMError() ||
+ radioRsp->rspInfo.error == RadioError::INTERNAL_ERR ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -723,7 +747,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED);
}
}
@@ -747,7 +772,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- EXPECT_EQ(radioRsp->rspInfo.error, RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED);
}
/* Reset back to no carrier restriction */
@@ -761,7 +787,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- EXPECT_EQ(radioRsp->rspInfo.error, RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED);
}
}
@@ -777,7 +804,8 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- EXPECT_EQ(radioRsp->rspInfo.error, RadioError::NONE);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED);
}
}
@@ -792,9 +820,11 @@
EXPECT_EQ(RadioResponseType::SOLICITED, radioRsp->rspInfo.type);
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
+ std::cout << static_cast<int>(radioRsp->rspInfo.error) << std::endl;
+
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(CheckGeneralError() ||
- radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED);
}
}
@@ -809,9 +839,11 @@
EXPECT_EQ(RadioResponseType::SOLICITED, radioRsp->rspInfo.type);
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
+ std::cout << static_cast<int>(radioRsp->rspInfo.error) << std::endl;
+
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(CheckGeneralError() ||
- radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS);
+ ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED);
}
}
@@ -831,4 +863,4 @@
ASSERT_TRUE(radioRsp->rspInfo.error == RadioError::NONE ||
radioRsp->rspInfo.error == RadioError::REQUEST_NOT_SUPPORTED);
}
-}
\ No newline at end of file
+}
diff --git a/radio/1.0/vts/functional/radio_hidl_hal_sms.cpp b/radio/1.0/vts/functional/radio_hidl_hal_sms.cpp
index 9aa7663..d88232a 100644
--- a/radio/1.0/vts/functional/radio_hidl_hal_sms.cpp
+++ b/radio/1.0/vts/functional/radio_hidl_hal_sms.cpp
@@ -36,7 +36,8 @@
if (cardStatus.cardState == CardState::ABSENT) {
ASSERT_TRUE(CheckGeneralError() ||
radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS ||
- radioRsp->rspInfo.error == RadioError::INVALID_STATE);
+ radioRsp->rspInfo.error == RadioError::INVALID_STATE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
EXPECT_EQ(0, radioRsp->sendSmsResult.errorCode);
}
}
@@ -62,7 +63,8 @@
if (cardStatus.cardState == CardState::ABSENT) {
ASSERT_TRUE(CheckGeneralError() ||
radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS ||
- radioRsp->rspInfo.error == RadioError::INVALID_STATE);
+ radioRsp->rspInfo.error == RadioError::INVALID_STATE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -145,7 +147,8 @@
if (cardStatus.cardState == CardState::ABSENT) {
ASSERT_TRUE(CheckGeneralError() ||
radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS ||
- radioRsp->rspInfo.error == RadioError::INVALID_STATE);
+ radioRsp->rspInfo.error == RadioError::INVALID_STATE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -236,8 +239,11 @@
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(CheckGeneralError() || radioRsp->rspInfo.error == RadioError::INVALID_STATE ||
- radioRsp->rspInfo.error == RadioError::INVALID_MODEM_STATE);
+ ASSERT_TRUE(CheckGeneralError() ||
+ radioRsp->rspInfo.error == RadioError::INVALID_STATE ||
+ radioRsp->rspInfo.error ==
+ RadioError::INVALID_MODEM_STATE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -257,7 +263,8 @@
if (cardStatus.cardState == CardState::ABSENT) {
ASSERT_TRUE(CheckGeneralError() ||
radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS ||
- radioRsp->rspInfo.error == RadioError::INVALID_SMS_FORMAT);
+ radioRsp->rspInfo.error == RadioError::INVALID_SMS_FORMAT ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -285,7 +292,9 @@
radioRsp->rspInfo.error == RadioError::ENCODING_ERR ||
radioRsp->rspInfo.error == RadioError::NO_RESOURCES ||
radioRsp->rspInfo.error == RadioError::NETWORK_NOT_READY ||
- radioRsp->rspInfo.error == RadioError::INVALID_SMSC_ADDRESS);
+ radioRsp->rspInfo.error ==
+ RadioError::INVALID_SMSC_ADDRESS ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -308,7 +317,9 @@
radioRsp->rspInfo.error == RadioError::NONE ||
radioRsp->rspInfo.error == RadioError::MODEM_ERR ||
radioRsp->rspInfo.error == RadioError::NO_SUCH_ENTRY ||
- radioRsp->rspInfo.error == RadioError::INVALID_MODEM_STATE);
+ radioRsp->rspInfo.error ==
+ RadioError::INVALID_MODEM_STATE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -361,7 +372,9 @@
radioRsp->rspInfo.error == RadioError::NONE ||
radioRsp->rspInfo.error == RadioError::MODEM_ERR ||
radioRsp->rspInfo.error == RadioError::NO_SUCH_ENTRY ||
- radioRsp->rspInfo.error == RadioError::INVALID_SMSC_ADDRESS);
+ radioRsp->rspInfo.error ==
+ RadioError::INVALID_SMSC_ADDRESS ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -413,7 +426,9 @@
radioRsp->rspInfo.error == RadioError::NONE ||
radioRsp->rspInfo.error == RadioError::MODEM_ERR ||
radioRsp->rspInfo.error == RadioError::NO_SUCH_ENTRY ||
- radioRsp->rspInfo.error == RadioError::INVALID_MODEM_STATE);
+ radioRsp->rspInfo.error ==
+ RadioError::INVALID_MODEM_STATE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -434,6 +449,7 @@
ASSERT_TRUE(CheckGeneralError() ||
radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS ||
radioRsp->rspInfo.error == RadioError::INVALID_STATE ||
- radioRsp->rspInfo.error == RadioError::MODEM_ERR);
+ radioRsp->rspInfo.error == RadioError::MODEM_ERR ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
diff --git a/radio/1.0/vts/functional/radio_hidl_hal_stk.cpp b/radio/1.0/vts/functional/radio_hidl_hal_stk.cpp
index 774df98..1a755ea 100644
--- a/radio/1.0/vts/functional/radio_hidl_hal_stk.cpp
+++ b/radio/1.0/vts/functional/radio_hidl_hal_stk.cpp
@@ -37,24 +37,8 @@
std::cout << static_cast<int>(radioRsp->rspInfo.error) << std::endl;
ASSERT_TRUE(CheckGeneralError() ||
radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS ||
- radioRsp->rspInfo.error == RadioError::NONE);
- }
-
- // Test with sending random string
- serial = GetRandomSerialNumber();
- content = "0";
-
- radio->sendEnvelope(serial, content);
-
- EXPECT_EQ(std::cv_status::no_timeout, wait());
- EXPECT_EQ(RadioResponseType::SOLICITED, radioRsp->rspInfo.type);
- EXPECT_EQ(serial, radioRsp->rspInfo.serial);
-
- if (cardStatus.cardState == CardState::ABSENT) {
- std::cout << static_cast<int>(radioRsp->rspInfo.error) << std::endl;
- ASSERT_TRUE(CheckGeneralError() ||
- radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS ||
- radioRsp->rspInfo.error == RadioError::NONE);
+ radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -77,24 +61,8 @@
std::cout << static_cast<int>(radioRsp->rspInfo.error) << std::endl;
ASSERT_TRUE(CheckGeneralError() ||
radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS ||
- radioRsp->rspInfo.error == RadioError::NONE);
- }
-
- serial = GetRandomSerialNumber();
-
- // Test with sending random string
- commandResponse = "0";
-
- radio->sendTerminalResponseToSim(serial, commandResponse);
-
- EXPECT_EQ(std::cv_status::no_timeout, wait());
- EXPECT_EQ(RadioResponseType::SOLICITED, radioRsp->rspInfo.type);
- EXPECT_EQ(serial, radioRsp->rspInfo.serial);
-
- if (cardStatus.cardState == CardState::ABSENT) {
- std::cout << static_cast<int>(radioRsp->rspInfo.error) << std::endl;
- ASSERT_TRUE(CheckGeneralError() ||
- radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS);
+ radioRsp->rspInfo.error == RadioError::NONE ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -115,7 +83,8 @@
ASSERT_TRUE(CheckGeneralError() ||
radioRsp->rspInfo.error == RadioError::NONE ||
radioRsp->rspInfo.error == RadioError::MODEM_ERR ||
- radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS);
+ radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
@@ -155,21 +124,7 @@
if (cardStatus.cardState == CardState::ABSENT) {
ASSERT_TRUE(CheckGeneralError() ||
- radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS);
- }
-
- // Test with sending random string
- serial = GetRandomSerialNumber();
- contents = "0";
-
- radio->sendEnvelopeWithStatus(serial, contents);
-
- EXPECT_EQ(std::cv_status::no_timeout, wait());
- EXPECT_EQ(RadioResponseType::SOLICITED, radioRsp->rspInfo.type);
- EXPECT_EQ(serial, radioRsp->rspInfo.serial);
-
- if (cardStatus.cardState == CardState::ABSENT) {
- ASSERT_TRUE(CheckGeneralError() ||
- radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS);
+ radioRsp->rspInfo.error == RadioError::INVALID_ARGUMENTS ||
+ radioRsp->rspInfo.error == RadioError::SIM_ABSENT);
}
}
diff --git a/radio/1.0/vts/functional/radio_hidl_hal_test.cpp b/radio/1.0/vts/functional/radio_hidl_hal_test.cpp
index b957c6e..c529a1f 100644
--- a/radio/1.0/vts/functional/radio_hidl_hal_test.cpp
+++ b/radio/1.0/vts/functional/radio_hidl_hal_test.cpp
@@ -19,6 +19,11 @@
void RadioHidlTest::SetUp() {
radio =
::testing::VtsHalHidlTargetTestBase::getService<IRadio>(hidl_string(RADIO_SERVICE_NAME));
+ if (radio == NULL) {
+ sleep(60);
+ radio = ::testing::VtsHalHidlTargetTestBase::getService<IRadio>(
+ hidl_string(RADIO_SERVICE_NAME));
+ }
ASSERT_NE(radio, nullptr);
radioRsp = new RadioResponse(*this);
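Note: SetUp() now retries the service lookup once after a 60-second wait instead of failing immediately when the radio service has not yet registered. A generic sketch of that retry (illustrative helper, not the actual VtsHalHidlTargetTestBase API):

    #include <chrono>
    #include <functional>
    #include <thread>

    template <typename Service>
    Service getServiceWithOneRetry(const std::function<Service()>& getService,
                                   std::chrono::seconds retryDelay = std::chrono::seconds(60)) {
        Service service = getService();
        if (service == nullptr) {
            std::this_thread::sleep_for(retryDelay);  // give the service time to register
            service = getService();
        }
        return service;
    }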
@@ -35,6 +40,10 @@
EXPECT_EQ(RadioResponseType::SOLICITED, radioRsp->rspInfo.type);
EXPECT_EQ(serial, radioRsp->rspInfo.serial);
EXPECT_EQ(RadioError::NONE, radioRsp->rspInfo.error);
+
+ /* VTS testing with SIM absent only. This needs to be removed later in P, when
+ * SIM-present scenarios will be tested. */
+ EXPECT_EQ(CardState::ABSENT, cardStatus.cardState);
}
void RadioHidlTest::TearDown() {}
diff --git a/sensors/1.0/vts/functional/VtsHalSensorsV1_0TargetTest.cpp b/sensors/1.0/vts/functional/VtsHalSensorsV1_0TargetTest.cpp
index 51d7645..e4736bc 100644
--- a/sensors/1.0/vts/functional/VtsHalSensorsV1_0TargetTest.cpp
+++ b/sensors/1.0/vts/functional/VtsHalSensorsV1_0TargetTest.cpp
@@ -1236,6 +1236,11 @@
SensorInfo sensor = defaultSensorByType(type);
+ if (!isValidType(sensor.type)) {
+ // no default sensor of this type
+ return;
+ }
+
if (!isDirectReportRateSupported(sensor, rate)) {
return;
}
diff --git a/wifi/supplicant/1.0/vts/functional/VtsHalWifiSupplicantV1_0TargetTest.cpp b/wifi/supplicant/1.0/vts/functional/VtsHalWifiSupplicantV1_0TargetTest.cpp
index a69d14d..33f3049 100644
--- a/wifi/supplicant/1.0/vts/functional/VtsHalWifiSupplicantV1_0TargetTest.cpp
+++ b/wifi/supplicant/1.0/vts/functional/VtsHalWifiSupplicantV1_0TargetTest.cpp
@@ -23,12 +23,9 @@
class SupplicantHidlEnvironment : public ::testing::Environment {
public:
virtual void SetUp() override {
- stopWifiFramework();
stopSupplicant();
}
virtual void TearDown() override {
- startWifiFramework();
- // Framework will start wpa_supplicant.
}
};
diff --git a/wifi/supplicant/1.0/vts/functional/supplicant_hidl_test_utils.cpp b/wifi/supplicant/1.0/vts/functional/supplicant_hidl_test_utils.cpp
index df4bfa9..79be2b0 100644
--- a/wifi/supplicant/1.0/vts/functional/supplicant_hidl_test_utils.cpp
+++ b/wifi/supplicant/1.0/vts/functional/supplicant_hidl_test_utils.cpp
@@ -135,19 +135,6 @@
std::condition_variable condition_;
};
-void stopWifiFramework() {
- ASSERT_EQ(std::system("stop"), 0);
- // TODO: Use some other mechanism to wait for the framework to
- // finish disabling.
- sleep(5);
-}
-
-void startWifiFramework() {
- ASSERT_EQ(std::system("start"), 0);
- // These tests don't care whether the framework
- // finished enabling or not.
-}
-
void stopSupplicant() {
DriverTool driver_tool;
SupplicantManager supplicant_manager;