Merge "IMediaExtractor: ensure users to check returned value by getTrack." into oc-dev
diff --git a/include/media/VolumeShaper.h b/include/media/VolumeShaper.h
index 4ddb8d3..1282124 100644
--- a/include/media/VolumeShaper.h
+++ b/include/media/VolumeShaper.h
@@ -528,6 +528,10 @@
         mDelayXOffset = xOffset;
     }
 
+    bool isStarted() const {
+        return mStartFrame >= 0;
+    }
+
     std::pair<T /* volume */, bool /* active */> getVolume(
             int64_t trackFrameCount, double trackSampleRate) {
         if ((getFlags() & VolumeShaper::Operation::FLAG_DELAY) != 0) {
@@ -752,6 +756,8 @@
         return it->getState();
     }
 
+    // getVolume() is not const, as it updates internal state.
+    // Once called, any VolumeShapers not already started begin running.
     std::pair<T /* volume */, bool /* active */> getVolume(int64_t trackFrameCount) {
         AutoMutex _l(mLock);
         mLastFrame = trackFrameCount;
@@ -768,6 +774,14 @@
         return mLastVolume;
     }
 
+    // Used by a client side VolumeHandler to ensure all the VolumeShapers
+    // indicate that they have been started.  Upon a change in audioserver
+    // output sink, this information is used for restoration of the server side
+    // VolumeHandler.
+    void setStarted() {
+        (void)getVolume(mLastFrame);  // getVolume() will start the individual VolumeShapers.
+    }
+
     std::pair<T /* volume */, bool /* active */> getLastVolume() const {
         AutoMutex _l(mLock);
         return mLastVolume;
@@ -784,14 +798,12 @@
         return ss.str();
     }
 
-    void forall(const std::function<VolumeShaper::Status (
-            const sp<VolumeShaper::Configuration> &configuration,
-            const sp<VolumeShaper::Operation> &operation)> &lambda) {
+    void forall(const std::function<VolumeShaper::Status (const VolumeShaper &)> &lambda) {
         AutoMutex _l(mLock);
         VS_LOG("forall: mVolumeShapers.size() %zu", mVolumeShapers.size());
         for (const auto &shaper : mVolumeShapers) {
-            VS_LOG("forall applying lambda");
-            (void)lambda(shaper.mConfiguration, shaper.mOperation);
+            VolumeShaper::Status status = lambda(shaper);
+            VS_LOG("forall applying lambda on shaper (%p): %d", &shaper, (int)status);
         }
     }
 
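
VolumeShaper.h now tracks per-shaper start state: isStarted() reports whether a shaper has a start frame, setStarted() evaluates the current volume so any pending shapers begin running, and forall() hands its lambda the whole VolumeShaper instead of the configuration/operation pair. A minimal standalone sketch of that shape, with invented names and plain C++ rather than the AOSP classes:

    #include <functional>
    #include <iostream>
    #include <vector>

    // Illustrative stand-ins for the AOSP types; all names here are invented.
    struct Shaper {
        int id;
        long startFrame = -1;               // < 0 means "not started yet"
        bool isStarted() const { return startFrame >= 0; }
    };

    class Handler {
    public:
        void add(int id) { mShapers.push_back({id}); }

        // Marks every pending shaper as running, mirroring how setStarted()
        // simply evaluates the current volume and thereby starts them.
        void setStarted(long frame) {
            for (auto &s : mShapers) {
                if (!s.isStarted()) s.startFrame = frame;
            }
        }

        // forall() passes the whole shaper so the callback can inspect
        // started-ness as well as its configuration.
        void forall(const std::function<void(const Shaper &)> &fn) const {
            for (const auto &s : mShapers) fn(s);
        }

    private:
        std::vector<Shaper> mShapers;
    };

    int main() {
        Handler h;
        h.add(1);
        h.add(2);
        h.setStarted(480 /* frame */);
        h.forall([](const Shaper &s) {
            std::cout << "shaper " << s.id << " started=" << s.isStarted() << "\n";
        });
    }
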
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 3a0ce5e..4baf253 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -652,6 +652,9 @@
             get_sched_policy(0, &mPreviousSchedulingGroup);
             androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO);
         }
+
+        // Start our local VolumeHandler for restoration purposes.
+        mVolumeHandler->setStarted();
     } else {
         ALOGE("start() status %d", status);
         mState = previousState;
@@ -2254,17 +2257,20 @@
             }
         }
         // restore volume handler
-        mVolumeHandler->forall([this](const sp<VolumeShaper::Configuration> &configuration,
-                const sp<VolumeShaper::Operation> &operation) -> VolumeShaper::Status {
-            sp<VolumeShaper::Operation> operationToEnd = new VolumeShaper::Operation(*operation);
+        mVolumeHandler->forall([this](const VolumeShaper &shaper) -> VolumeShaper::Status {
+            sp<VolumeShaper::Operation> operationToEnd =
+                    new VolumeShaper::Operation(shaper.mOperation);
             // TODO: Ideally we would restore to the exact xOffset position
             // as returned by getVolumeShaperState(), but we don't have that
             // information when restoring at the client unless we periodically poll
             // the server or create shared memory state.
             //
-            // For now, we simply advance to the end of the VolumeShaper effect.
-            operationToEnd->setXOffset(1.f);
-            return mAudioTrack->applyVolumeShaper(configuration, operationToEnd);
+            // For now, we simply advance to the end of the VolumeShaper effect
+            // if it has been started.
+            if (shaper.isStarted()) {
+                operationToEnd->setXOffset(1.f);
+            }
+            return mAudioTrack->applyVolumeShaper(shaper.mConfiguration, operationToEnd);
         });
 
         if (mState == STATE_ACTIVE) {
@@ -2334,19 +2340,36 @@
     AutoMutex lock(mLock);
     mVolumeHandler->setIdIfNecessary(configuration);
     VolumeShaper::Status status = mAudioTrack->applyVolumeShaper(configuration, operation);
+
+    if (status == DEAD_OBJECT) {
+        if (restoreTrack_l("applyVolumeShaper") == OK) {
+            status = mAudioTrack->applyVolumeShaper(configuration, operation);
+        }
+    }
     if (status >= 0) {
         // save VolumeShaper for restore
         mVolumeHandler->applyVolumeShaper(configuration, operation);
+        if (mState == STATE_ACTIVE || mState == STATE_STOPPING) {
+            mVolumeHandler->setStarted();
+        }
+    } else {
+        // warn only if not an expected restore failure.
+        ALOGW_IF(!((isOffloadedOrDirect_l() || mDoNotReconnect) && status == DEAD_OBJECT),
+                "applyVolumeShaper failed: %d", status);
     }
     return status;
 }
 
 sp<VolumeShaper::State> AudioTrack::getVolumeShaperState(int id)
 {
-    // TODO: To properly restore the AudioTrack
-    // we will need to save the last state in AudioTrackShared.
     AutoMutex lock(mLock);
-    return mAudioTrack->getVolumeShaperState(id);
+    sp<VolumeShaper::State> state = mAudioTrack->getVolumeShaperState(id);
+    if (state.get() == nullptr && (mCblk->mFlags & CBLK_INVALID) != 0) {
+        if (restoreTrack_l("getVolumeShaperState") == OK) {
+            state = mAudioTrack->getVolumeShaperState(id);
+        }
+    }
+    return state;
 }
 
 status_t AudioTrack::getTimestamp(ExtendedTimestamp *timestamp)
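
AudioTrack::applyVolumeShaper() now retries once after restoreTrack_l() when the server-side track returns DEAD_OBJECT, and getVolumeShaperState() does the same when no state comes back and the control block is flagged invalid. A hedged sketch of that retry-once-after-restore shape (standalone C++ with hypothetical names, not the AudioTrack API):

    #include <functional>
    #include <iostream>

    // Toy status codes; DEAD_OBJECT stands in for a dead binder proxy.
    enum Status { STATUS_OK = 0, DEAD_OBJECT = -32 };

    // Issue the call, and retry it exactly once if the remote object is dead
    // and a restore succeeds -- the shape applyVolumeShaper() now follows.
    Status callWithRestore(const std::function<Status()> &call,
                           const std::function<bool()> &restore) {
        Status status = call();
        if (status == DEAD_OBJECT && restore()) {
            status = call();
        }
        return status;
    }

    int main() {
        int attempts = 0;
        Status s = callWithRestore(
                [&] { return ++attempts == 1 ? DEAD_OBJECT : STATUS_OK; },
                [] { std::cout << "restoring track\n"; return true; });
        std::cout << "status=" << s << " attempts=" << attempts << "\n";
    }
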
diff --git a/media/libaudiohal/EffectBufferHalHidl.cpp b/media/libaudiohal/EffectBufferHalHidl.cpp
index d6a41a2..ef4097a 100644
--- a/media/libaudiohal/EffectBufferHalHidl.cpp
+++ b/media/libaudiohal/EffectBufferHalHidl.cpp
@@ -56,7 +56,8 @@
 }
 
 EffectBufferHalHidl::EffectBufferHalHidl(size_t size)
-        : mBufferSize(size), mExternalData(nullptr), mAudioBuffer{0, {nullptr}} {
+        : mBufferSize(size), mFrameCountChanged(false),
+          mExternalData(nullptr), mAudioBuffer{0, {nullptr}} {
     mHidlBuffer.id = makeUniqueId();
     mHidlBuffer.frameCount = 0;
 }
@@ -107,6 +108,13 @@
 void EffectBufferHalHidl::setFrameCount(size_t frameCount) {
     mHidlBuffer.frameCount = frameCount;
     mAudioBuffer.frameCount = frameCount;
+    mFrameCountChanged = true;
+}
+
+bool EffectBufferHalHidl::checkFrameCountChange() {
+    bool result = mFrameCountChanged;
+    mFrameCountChanged = false;
+    return result;
 }
 
 void EffectBufferHalHidl::setExternalData(void* external) {
diff --git a/media/libaudiohal/EffectBufferHalHidl.h b/media/libaudiohal/EffectBufferHalHidl.h
index 6e9fd0b..66a81c2 100644
--- a/media/libaudiohal/EffectBufferHalHidl.h
+++ b/media/libaudiohal/EffectBufferHalHidl.h
@@ -37,6 +37,7 @@
 
     virtual void setExternalData(void* external);
     virtual void setFrameCount(size_t frameCount);
+    virtual bool checkFrameCountChange();
 
     virtual void update();
     virtual void commit();
@@ -51,6 +52,7 @@
     static uint64_t makeUniqueId();
 
     const size_t mBufferSize;
+    bool mFrameCountChanged;
     void* mExternalData;
     AudioBuffer mHidlBuffer;
     sp<IMemory> mMemory;
diff --git a/media/libaudiohal/EffectBufferHalLocal.cpp b/media/libaudiohal/EffectBufferHalLocal.cpp
index 9fe2c7b..7951c8e 100644
--- a/media/libaudiohal/EffectBufferHalLocal.cpp
+++ b/media/libaudiohal/EffectBufferHalLocal.cpp
@@ -39,13 +39,13 @@
 
 EffectBufferHalLocal::EffectBufferHalLocal(size_t size)
         : mOwnBuffer(new uint8_t[size]),
-          mBufferSize(size),
+          mBufferSize(size), mFrameCountChanged(false),
           mAudioBuffer{0, {mOwnBuffer.get()}} {
 }
 
 EffectBufferHalLocal::EffectBufferHalLocal(void* external, size_t size)
         : mOwnBuffer(nullptr),
-          mBufferSize(size),
+          mBufferSize(size), mFrameCountChanged(false),
           mAudioBuffer{0, {external}} {
 }
 
@@ -62,6 +62,7 @@
 
 void EffectBufferHalLocal::setFrameCount(size_t frameCount) {
     mAudioBuffer.frameCount = frameCount;
+    mFrameCountChanged = true;
 }
 
 void EffectBufferHalLocal::setExternalData(void* external) {
@@ -69,6 +70,12 @@
     mAudioBuffer.raw = external;
 }
 
+bool EffectBufferHalLocal::checkFrameCountChange() {
+    bool result = mFrameCountChanged;
+    mFrameCountChanged = false;
+    return result;
+}
+
 void EffectBufferHalLocal::update() {
 }
 
diff --git a/media/libaudiohal/EffectBufferHalLocal.h b/media/libaudiohal/EffectBufferHalLocal.h
index 202d878..d2b624b 100644
--- a/media/libaudiohal/EffectBufferHalLocal.h
+++ b/media/libaudiohal/EffectBufferHalLocal.h
@@ -32,6 +32,7 @@
 
     virtual void setExternalData(void* external);
     virtual void setFrameCount(size_t frameCount);
+    virtual bool checkFrameCountChange();
 
     virtual void update();
     virtual void commit();
@@ -43,6 +44,7 @@
 
     std::unique_ptr<uint8_t[]> mOwnBuffer;
     const size_t mBufferSize;
+    bool mFrameCountChanged;
     audio_buffer_t mAudioBuffer;
 
     // Can not be constructed directly by clients.
diff --git a/media/libaudiohal/EffectHalHidl.cpp b/media/libaudiohal/EffectHalHidl.cpp
index 0babfda..7d61443 100644
--- a/media/libaudiohal/EffectHalHidl.cpp
+++ b/media/libaudiohal/EffectHalHidl.cpp
@@ -168,13 +168,20 @@
     return OK;
 }
 
+bool EffectHalHidl::needToResetBuffers() {
+    if (mBuffersChanged) return true;
+    bool inBufferFrameCountUpdated = mInBuffer->checkFrameCountChange();
+    bool outBufferFrameCountUpdated = mOutBuffer->checkFrameCountChange();
+    return inBufferFrameCountUpdated || outBufferFrameCountUpdated;
+}
+
 status_t EffectHalHidl::processImpl(uint32_t mqFlag) {
     if (mEffect == 0 || mInBuffer == 0 || mOutBuffer == 0) return NO_INIT;
     status_t status;
     if (!mStatusMQ && (status = prepareForProcessing()) != OK) {
         return status;
     }
-    if (mBuffersChanged && (status = setProcessBuffers()) != OK) {
+    if (needToResetBuffers() && (status = setProcessBuffers()) != OK) {
         return status;
     }
     // The data is already in the buffers, just need to flush it and wake up the server side.
diff --git a/media/libaudiohal/EffectHalHidl.h b/media/libaudiohal/EffectHalHidl.h
index c8db36f..0d011aa 100644
--- a/media/libaudiohal/EffectHalHidl.h
+++ b/media/libaudiohal/EffectHalHidl.h
@@ -92,6 +92,7 @@
 
     status_t getConfigImpl(uint32_t cmdCode, uint32_t *replySize, void *pReplyData);
     status_t prepareForProcessing();
+    bool needToResetBuffers();
     status_t processImpl(uint32_t mqFlag);
     status_t setConfigImpl(
             uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
diff --git a/media/libaudiohal/include/EffectBufferHalInterface.h b/media/libaudiohal/include/EffectBufferHalInterface.h
index 6fa7940..e862f6e 100644
--- a/media/libaudiohal/include/EffectBufferHalInterface.h
+++ b/media/libaudiohal/include/EffectBufferHalInterface.h
@@ -39,6 +39,8 @@
 
     virtual void setExternalData(void* external) = 0;
     virtual void setFrameCount(size_t frameCount) = 0;
+    virtual bool checkFrameCountChange() = 0;  // returns whether frame count has been updated
+                                               // since the last call to this method
 
     virtual void update() = 0;  // copies data from the external buffer, noop for allocated buffers
     virtual void commit() = 0;  // copies data to the external buffer, noop for allocated buffers
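
checkFrameCountChange() is a read-and-clear flag: setFrameCount() latches that the count changed, and the next check returns the latched value while resetting it. EffectHalHidl::needToResetBuffers() deliberately reads both buffers' flags into locals before OR-ing them, so short-circuit evaluation cannot leave the second flag set. A small sketch of the pattern (plain C++, not the HAL classes):

    #include <cstddef>
    #include <iostream>

    // A latched "dirty" flag that is cleared when it is read.
    class FrameCountTracker {
    public:
        void setFrameCount(std::size_t frames) {
            mFrameCount = frames;
            mChanged = true;                 // latch the change
        }
        bool checkFrameCountChange() {
            bool result = mChanged;          // read...
            mChanged = false;                // ...and clear
            return result;
        }
    private:
        std::size_t mFrameCount = 0;
        bool mChanged = false;
    };

    int main() {
        FrameCountTracker in, out;
        in.setFrameCount(960);
        // Read both flags before combining so both are always cleared,
        // even when the first one is already true.
        bool inChanged = in.checkFrameCountChange();
        bool outChanged = out.checkFrameCountChange();
        std::cout << "reset buffers: " << (inChanged || outChanged) << "\n";  // 1
        std::cout << "second check: " << in.checkFrameCountChange() << "\n";  // 0
    }
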
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index a7d9f0f..05b726a 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -302,6 +302,12 @@
 void AudioMixer::track_t::unprepareForDownmix() {
     ALOGV("AudioMixer::unprepareForDownmix(%p)", this);
 
+    if (mPostDownmixReformatBufferProvider != nullptr) {
+        // release any buffers held by the mPostDownmixReformatBufferProvider
+        // before deallocating the downmixerBufferProvider.
+        mPostDownmixReformatBufferProvider->reset();
+    }
+
     mDownmixRequiresFormat = AUDIO_FORMAT_INVALID;
     if (downmixerBufferProvider != NULL) {
         // this track had previously been configured with a downmixer, delete it
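
The AudioMixer change releases any buffers held by the post-downmix reformat provider before the downmixer provider they came from is torn down. A tiny sketch of that release-before-delete ordering, with illustrative stand-in types rather than the mixer's buffer providers:

    #include <iostream>
    #include <memory>

    struct Producer {                        // stands in for the downmix provider
        ~Producer() { std::cout << "producer destroyed\n"; }
    };

    struct Consumer {                        // stands in for the reformat provider
        const Producer *held = nullptr;      // a buffer borrowed from the producer
        void release() { held = nullptr; std::cout << "consumer released buffer\n"; }
    };

    int main() {
        auto producer = std::make_unique<Producer>();
        Consumer consumer{producer.get()};

        // Correct teardown order: release borrowed buffers first, then destroy
        // the provider they came from.
        consumer.release();
        producer.reset();
    }
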
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index b082654..cba5cf5 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -2029,12 +2029,23 @@
     ALOGV("setVolume");
     t->setVolume(mLeftVolume, mRightVolume);
 
-    // Dispatch any queued VolumeShapers when the track was not open.
-    mVolumeHandler->forall([&t](const sp<VolumeShaper::Configuration> &configuration,
-            const sp<VolumeShaper::Operation> &operation) -> VolumeShaper::Status {
-        return t->applyVolumeShaper(configuration, operation);
+    // Restore VolumeShapers for the MediaPlayer in case the track was recreated
+    // due to an output sink error (e.g. offload to non-offload switch).
+    mVolumeHandler->forall([&t](const VolumeShaper &shaper) -> VolumeShaper::Status {
+        sp<VolumeShaper::Operation> operationToEnd =
+                new VolumeShaper::Operation(shaper.mOperation);
+        // TODO: Ideally we would restore to the exact xOffset position
+        // as returned by getVolumeShaperState(), but we don't have that
+        // information when restoring at the client unless we periodically poll
+        // the server or create shared memory state.
+        //
+        // For now, we simply advance to the end of the VolumeShaper effect
+        // if it has been started.
+        if (shaper.isStarted()) {
+            operationToEnd->setXOffset(1.f);
+        }
+        return t->applyVolumeShaper(shaper.mConfiguration, operationToEnd);
     });
-    mVolumeHandler->reset(); // After dispatching, clear VolumeShaper queue.
 
     mSampleRateHz = sampleRate;
     mFlags = flags;
@@ -2075,7 +2086,11 @@
     if (mTrack != 0) {
         mTrack->setVolume(mLeftVolume, mRightVolume);
         mTrack->setAuxEffectSendLevel(mSendLevel);
-        return mTrack->start();
+        status_t status = mTrack->start();
+        if (status == NO_ERROR) {
+            mVolumeHandler->setStarted();
+        }
+        return status;
     }
     return NO_INIT;
 }
@@ -2279,13 +2294,20 @@
     Mutex::Autolock lock(mLock);
     ALOGV("AudioOutput::applyVolumeShaper");
 
-    // We take ownership of the VolumeShaper if set before the track is created.
     mVolumeHandler->setIdIfNecessary(configuration);
+
+    VolumeShaper::Status status;
     if (mTrack != 0) {
-        return mTrack->applyVolumeShaper(configuration, operation);
+        status = mTrack->applyVolumeShaper(configuration, operation);
+        if (status >= 0) {
+            (void)mVolumeHandler->applyVolumeShaper(configuration, operation);
+            // TODO: start on exact AudioTrack state (STATE_ACTIVE || STATE_STOPPING)
+            mVolumeHandler->setStarted();
+        }
     } else {
-        return mVolumeHandler->applyVolumeShaper(configuration, operation);
+        status = mVolumeHandler->applyVolumeShaper(configuration, operation);
     }
+    return status;
 }
 
 sp<VolumeShaper::State> MediaPlayerService::AudioOutput::getVolumeShaperState(int id)
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index ba45b98..fb1f42b 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -1954,12 +1954,12 @@
     size_t psshsize;
 
     if (!mFileMeta->findData(kKeyPssh, &type, &pssh, &psshsize)) {
-        ALOGE("checkDrmInfo: No PSSH");
+        ALOGV("checkDrmInfo: No PSSH");
         return OK; // source without DRM info
     }
 
     Parcel parcel;
-    NuPlayerDrm::retrieveDrmInfo(pssh, psshsize, mMimes, &parcel);
+    NuPlayerDrm::retrieveDrmInfo(pssh, psshsize, &parcel);
     ALOGV("checkDrmInfo: MEDIA_DRM_INFO PSSH size: %d  Parcel size: %d  objects#: %d",
           (int)psshsize, (int)parcel.dataSize(), (int)parcel.objectsCount());
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDrm.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDrm.cpp
index ce6cedc..b7c9db7 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDrm.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDrm.cpp
@@ -205,26 +205,8 @@
 }
 
 // Parcel has only private copy constructor so passing it in rather than returning
-void NuPlayerDrm::retrieveDrmInfo(const void *pssh, size_t psshsize,
-        const Vector<String8> &mimes_in, Parcel *parcel)
+void NuPlayerDrm::retrieveDrmInfo(const void *pssh, size_t psshsize, Parcel *parcel)
 {
-    // 0) Make mimes a vector of unique items while keeping the original order; video first
-    Vector<String8> mimes;
-    for (size_t j = 0; j < mimes_in.size(); j++) {
-        String8 mime = mimes_in[j];
-        bool exists = false;
-        for (size_t i = 0; i < mimes.size() && !exists; i++) {
-            if (mimes[i] == mime) {
-                exists = true;
-            }
-        } // for i
-
-        if (!exists) {
-            mimes.add(mime);
-        }
-    } // for j
-
-
     // 1) PSSH bytes
     parcel->writeUint32(psshsize);
     parcel->writeByteArray(psshsize, (const uint8_t*)pssh);
@@ -242,18 +224,6 @@
         ALOGV("retrieveDrmInfo: MEDIA_DRM_INFO  supportedScheme[%zu] %s", i,
                 uuid.toHexString().string());
     }
-
-    // TODO: remove mimes after it's removed from Java DrmInfo
-    // 3) mimes
-    parcel->writeUint32(mimes.size());
-    for (size_t i = 0; i < mimes.size(); i++) {
-        // writing as String16 so the Java framework side can unpack it to Java String
-        String16 mime(mimes[i]);
-        parcel->writeString16(mime);
-
-        ALOGV("retrieveDrmInfo: MEDIA_DRM_INFO  MIME[%zu] %s",
-                i, mimes[i].string());
-    }
 }
 
 ////////////////////////////////////////////////////////////////////////////////////////////
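
With the MIME list dropped, the DrmInfo parcel carries only the PSSH blob followed by the supported scheme UUIDs. A toy sketch of that two-part layout is below; it mirrors only the field order, not android::Parcel's actual wire encoding, and all names are invented:

    #include <cstddef>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Toy writer that mirrors only the order of fields: a length-prefixed PSSH
    // blob, then a count of supported schemes followed by their 16-byte UUIDs.
    struct Writer {
        std::vector<uint8_t> out;
        void writeUint32(uint32_t v) {
            const uint8_t *p = reinterpret_cast<const uint8_t *>(&v);
            out.insert(out.end(), p, p + sizeof(v));
        }
        void writeBytes(const uint8_t *data, std::size_t size) {
            out.insert(out.end(), data, data + size);
        }
    };

    int main() {
        const uint8_t pssh[4] = {0xde, 0xad, 0xbe, 0xef};  // placeholder PSSH box
        const uint8_t uuid[16] = {};                       // placeholder scheme UUID

        Writer w;
        w.writeUint32(sizeof(pssh));         // 1) PSSH size, then the bytes
        w.writeBytes(pssh, sizeof(pssh));
        w.writeUint32(1);                    // 2) number of supported schemes
        w.writeBytes(uuid, sizeof(uuid));    //    one 16-byte UUID per scheme
        std::cout << "serialized " << w.out.size() << " bytes\n";
    }
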
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDrm.h b/media/libmediaplayerservice/nuplayer/NuPlayerDrm.h
index 6704bd1..6b8a2d9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDrm.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDrm.h
@@ -84,8 +84,7 @@
         static sp<ICrypto> createCryptoAndPlugin(const uint8_t uuid[16],
                 const Vector<uint8_t> &drmSessionId, status_t &status);
         // Parcel has only private copy constructor so passing it in rather than returning
-        static void retrieveDrmInfo(const void *pssh, size_t psshsize,
-                const Vector<String8> &mimes_in, Parcel *parcel);
+        static void retrieveDrmInfo(const void *pssh, size_t psshsize, Parcel *parcel);
 
         ////////////////////////////////////////////////////////////////////////////////////////////
         /// Helpers for NuPlayerDecoder
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 8b91541..2877ba2 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -251,6 +251,7 @@
 
     virtual PortMode getPortMode(OMX_U32 portIndex);
 
+    virtual void stateExited();
     virtual bool onMessageReceived(const sp<AMessage> &msg);
 
     virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
@@ -553,6 +554,7 @@
       mTunneled(false),
       mDescribeColorAspectsIndex((OMX_INDEXTYPE)0),
       mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0),
+      mStateGeneration(0),
       mVendorExtensionsStatus(kExtensionsUnchecked) {
     mUninitializedState = new UninitializedState(this);
     mLoadedState = new LoadedState(this);
@@ -5322,6 +5324,10 @@
     return KEEP_BUFFERS;
 }
 
+void ACodec::BaseState::stateExited() {
+    ++mCodec->mStateGeneration;
+}
+
 bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatInputBufferFilled:
@@ -5398,6 +5404,12 @@
             break;
         }
 
+        case ACodec::kWhatForceStateTransition:
+        {
+            ALOGV("Already transitioned --- ignore");
+            break;
+        }
+
         default:
             return false;
     }
@@ -6988,7 +7000,6 @@
 
 void ACodec::ExecutingState::stateEntered() {
     ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());
-
     mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
     mCodec->processDeferredMessages();
 }
@@ -7614,6 +7625,30 @@
     mCallback->onSignaledInputEOS(err);
 }
 
+void ACodec::forceStateTransition(int generation) {
+    if (generation != mStateGeneration) {
+        ALOGV("Ignoring stale force state transition message: #%d (now #%d)",
+                generation, mStateGeneration);
+        return;
+    }
+    ALOGE("State machine stuck");
+    // Error must have already been signalled to the client.
+
+    // Deferred messages will be handled at LoadedState at the end of the
+    // transition.
+    mShutdownInProgress = true;
+    // No shutdown complete callback at the end of the transition.
+    mExplicitShutdown = false;
+    mKeepComponentAllocated = true;
+
+    status_t err = mOMXNode->sendCommand(OMX_CommandStateSet, OMX_StateIdle);
+    if (err != OK) {
+        // TODO: do some recovery here.
+    } else {
+        changeState(mExecutingToIdleState);
+    }
+}
+
 bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
     mCodec->onFrameRendered(mediaTimeUs, systemNano);
     return true;
@@ -7680,7 +7715,14 @@
 
     switch (msg->what()) {
         case kWhatFlush:
-        case kWhatShutdown:
+        case kWhatShutdown: {
+            if (mCodec->mFatalError) {
+                sp<AMessage> msg = new AMessage(ACodec::kWhatForceStateTransition, mCodec);
+                msg->setInt32("generation", mCodec->mStateGeneration);
+                msg->post(3000000);
+            }
+            // fall-through
+        }
         case kWhatResume:
         case kWhatSetParameters:
         {
@@ -7693,6 +7735,16 @@
             break;
         }
 
+        case kWhatForceStateTransition:
+        {
+            int32_t generation = 0;
+            CHECK(msg->findInt32("generation", &generation));
+            mCodec->forceStateTransition(generation);
+
+            handled = true;
+            break;
+        }
+
         default:
             handled = BaseState::onMessageReceived(msg);
             break;
@@ -7752,15 +7804,7 @@
 
                 if (err != OK) {
                     mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
-
-                    // This is technically not correct, but appears to be
-                    // the only way to free the component instance.
-                    // Controlled transitioning from excecuting->idle
-                    // and idle->loaded seem impossible probably because
-                    // the output port never finishes re-enabling.
-                    mCodec->mShutdownInProgress = true;
-                    mCodec->mKeepComponentAllocated = false;
-                    mCodec->changeState(mCodec->mLoadedState);
+                    ALOGE("Error occurred while disabling the output port");
                 }
 
                 return true;
@@ -7785,7 +7829,7 @@
         }
 
         default:
-            return false;
+            return BaseState::onOMXEvent(event, data1, data2);
     }
 }
 
@@ -7987,6 +8031,11 @@
         case kWhatShutdown:
         {
             mCodec->deferMessage(msg);
+            if (mCodec->mFatalError) {
+                sp<AMessage> msg = new AMessage(ACodec::kWhatForceStateTransition, mCodec);
+                msg->setInt32("generation", mCodec->mStateGeneration);
+                msg->post(3000000);
+            }
             break;
         }
 
@@ -7997,6 +8046,16 @@
             break;
         }
 
+        case kWhatForceStateTransition:
+        {
+            int32_t generation = 0;
+            CHECK(msg->findInt32("generation", &generation));
+            mCodec->forceStateTransition(generation);
+
+            handled = true;
+            break;
+        }
+
         default:
             handled = BaseState::onMessageReceived(msg);
             break;
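
The new kWhatForceStateTransition path guards against stale delivery with a generation counter: stateExited() bumps mStateGeneration, the delayed message carries the generation current when it was posted, and forceStateTransition() ignores it if the machine has since moved on. A standalone sketch of that generation-guard idea (plain C++, no ALooper/AMessage):

    #include <iostream>

    // A state machine that arms a delayed "force transition" tagged with the
    // generation current at arming time; the tag exposes staleness on delivery.
    class Machine {
    public:
        void enterNewState() { ++mGeneration; }   // like stateExited() bumping it

        int armForceTransition() const { return mGeneration; }

        void onForceTransition(int generation) {
            if (generation != mGeneration) {
                std::cout << "stale force-transition #" << generation
                          << " (now #" << mGeneration << "), ignoring\n";
                return;
            }
            std::cout << "state machine stuck, forcing shutdown\n";
        }

    private:
        int mGeneration = 0;
    };

    int main() {
        Machine m;
        int tag = m.armForceTransition();  // a delayed message would carry this
        m.enterNewState();                 // the machine made progress meanwhile
        m.onForceTransition(tag);          // -> stale, ignored

        tag = m.armForceTransition();
        m.onForceTransition(tag);          // -> still stuck, force the shutdown
    }
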
diff --git a/media/libstagefright/include/ACodec.h b/media/libstagefright/include/ACodec.h
index 06ee0e8..22b8657 100644
--- a/media/libstagefright/include/ACodec.h
+++ b/media/libstagefright/include/ACodec.h
@@ -150,6 +150,7 @@
         kWhatSubmitOutputMetadataBufferIfEOS = 'subm',
         kWhatOMXDied                 = 'OMXd',
         kWhatReleaseCodecInstance    = 'relC',
+        kWhatForceStateTransition    = 'fstt',
     };
 
     enum {
@@ -305,6 +306,8 @@
 
     std::shared_ptr<ACodecBufferChannel> mBufferChannel;
 
+    int32_t mStateGeneration;
+
     enum {
         kExtensionsUnchecked,
         kExtensionsNone,
@@ -569,6 +572,9 @@
     // Send EOS on input stream.
     void onSignalEndOfInputStream();
 
+    // Force EXEC->IDLE->LOADED shutdown sequence if not stale.
+    void forceStateTransition(int generation);
+
     DISALLOW_EVIL_CONSTRUCTORS(ACodec);
 };
 
diff --git a/media/libstagefright/omx/1.0/OmxStore.cpp b/media/libstagefright/omx/1.0/OmxStore.cpp
new file mode 100644
index 0000000..0e37af9
--- /dev/null
+++ b/media/libstagefright/omx/1.0/OmxStore.cpp
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <ios>
+#include <list>
+
+#include <android-base/logging.h>
+
+#include "Conversion.h"
+#include "OmxStore.h"
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace omx {
+namespace V1_0 {
+namespace implementation {
+
+OmxStore::OmxStore() {
+}
+
+OmxStore::~OmxStore() {
+}
+
+Return<void> OmxStore::listServiceAttributes(listServiceAttributes_cb _hidl_cb) {
+    _hidl_cb(toStatus(NO_ERROR), hidl_vec<ServiceAttribute>());
+    return Void();
+}
+
+Return<void> OmxStore::getNodePrefix(getNodePrefix_cb _hidl_cb) {
+    _hidl_cb(hidl_string());
+    return Void();
+}
+
+Return<void> OmxStore::listRoles(listRoles_cb _hidl_cb) {
+    _hidl_cb(hidl_vec<RoleInfo>());
+    return Void();
+}
+
+Return<sp<IOmx>> OmxStore::getOmx(hidl_string const& omxName) {
+    return IOmx::tryGetService(omxName);
+}
+
+// Methods from ::android::hidl::base::V1_0::IBase follow.
+
+IOmxStore* HIDL_FETCH_IOmxStore(const char* /* name */) {
+    return new OmxStore();
+}
+
+}  // namespace implementation
+}  // namespace V1_0
+}  // namespace omx
+}  // namespace media
+}  // namespace hardware
+}  // namespace android
diff --git a/media/libstagefright/omx/1.0/OmxStore.h b/media/libstagefright/omx/1.0/OmxStore.h
new file mode 100644
index 0000000..f377f5a
--- /dev/null
+++ b/media/libstagefright/omx/1.0/OmxStore.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2017, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_MEDIA_OMX_V1_0_OMXSTORE_H
+#define ANDROID_HARDWARE_MEDIA_OMX_V1_0_OMXSTORE_H
+
+#include <hidl/MQDescriptor.h>
+#include <hidl/Status.h>
+
+#include <android/hardware/media/omx/1.0/IOmxStore.h>
+
+namespace android {
+namespace hardware {
+namespace media {
+namespace omx {
+namespace V1_0 {
+namespace implementation {
+
+using ::android::hardware::media::omx::V1_0::IOmxStore;
+using ::android::hardware::media::omx::V1_0::IOmx;
+using ::android::hardware::media::omx::V1_0::Status;
+using ::android::hidl::base::V1_0::IBase;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::sp;
+using ::android::wp;
+
+struct OmxStore : public IOmxStore {
+    OmxStore();
+    virtual ~OmxStore();
+
+    // Methods from IOmx
+    Return<void> listServiceAttributes(listServiceAttributes_cb) override;
+    Return<void> getNodePrefix(getNodePrefix_cb) override;
+    Return<void> listRoles(listRoles_cb) override;
+    Return<sp<IOmx>> getOmx(hidl_string const&) override;
+};
+
+extern "C" IOmxStore* HIDL_FETCH_IOmxStore(const char* name);
+
+}  // namespace implementation
+}  // namespace V1_0
+}  // namespace omx
+}  // namespace media
+}  // namespace hardware
+}  // namespace android
+
+#endif  // ANDROID_HARDWARE_MEDIA_OMX_V1_0_OMXSTORE_H
diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk
index 90333ef..29e2ccc 100644
--- a/media/libstagefright/omx/Android.mk
+++ b/media/libstagefright/omx/Android.mk
@@ -15,6 +15,7 @@
         SoftVideoDecoderOMXComponent.cpp   \
         SoftVideoEncoderOMXComponent.cpp   \
         1.0/Omx.cpp                        \
+        1.0/OmxStore.cpp                   \
         1.0/WGraphicBufferProducer.cpp     \
         1.0/WProducerListener.cpp          \
         1.0/WGraphicBufferSource.cpp       \
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index f8ad2f6..103a3e9 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -766,7 +766,6 @@
 status_t GraphicBufferSource::submitBuffer_l(const VideoBuffer &item) {
     CHECK(!mFreeCodecBuffers.empty());
     IOMX::buffer_id codecBufferId = *mFreeCodecBuffers.begin();
-    mFreeCodecBuffers.erase(mFreeCodecBuffers.begin());
 
     ALOGV("submitBuffer_l [slot=%d, bufferId=%d]", item.mBuffer->getSlot(), codecBufferId);
 
@@ -797,6 +796,8 @@
         return err;
     }
 
+    mFreeCodecBuffers.erase(mFreeCodecBuffers.begin());
+
     ssize_t cbix = mSubmittedCodecBuffers.add(codecBufferId, buffer);
     ALOGV("emptyGraphicBuffer succeeded, bufferId=%u@%zd bufhandle=%p",
             codecBufferId, cbix, graphicBuffer->handle);
@@ -815,7 +816,6 @@
         return;
     }
     IOMX::buffer_id codecBufferId = *mFreeCodecBuffers.begin();
-    mFreeCodecBuffers.erase(mFreeCodecBuffers.begin());
 
     // We reject any additional incoming graphic buffers. There is no acquired buffer used for EOS
     status_t err = mOMXNode->emptyBuffer(
@@ -823,6 +823,7 @@
     if (err != OK) {
         ALOGW("emptyDirectBuffer EOS failed: 0x%x", err);
     } else {
+        mFreeCodecBuffers.erase(mFreeCodecBuffers.begin());
         ssize_t cbix = mSubmittedCodecBuffers.add(codecBufferId, nullptr);
         ALOGV("submitEndOfInputStream_l: buffer submitted, bufferId=%u@%zd", codecBufferId, cbix);
         mEndOfStreamSent = true;
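
Both submit paths in GraphicBufferSource now pop the codec buffer id off mFreeCodecBuffers only after emptyBuffer() succeeds, so a failed submission no longer consumes a free slot. A minimal sketch of take-on-success over a free list (standalone C++, invented helper name):

    #include <iostream>
    #include <set>

    // Consume the first free id only if the submit call succeeds; on failure
    // the id stays available for the next attempt.
    bool submitFront(std::set<int> &freeIds, bool (*submit)(int)) {
        if (freeIds.empty()) return false;
        int id = *freeIds.begin();           // peek, do not erase yet
        if (!submit(id)) return false;       // failure: id remains in the set
        freeIds.erase(freeIds.begin());      // success: now safe to consume it
        return true;
    }

    int main() {
        std::set<int> freeIds{7, 8, 9};
        submitFront(freeIds, [](int) { return false; });   // fails, nothing lost
        submitFront(freeIds, [](int id) {
            std::cout << "submitted buffer " << id << "\n";
            return true;
        });
        std::cout << "free ids left: " << freeIds.size() << "\n";  // 2
    }
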
diff --git a/media/ndk/NdkImage.cpp b/media/ndk/NdkImage.cpp
index 95fdf36..60312da 100644
--- a/media/ndk/NdkImage.cpp
+++ b/media/ndk/NdkImage.cpp
@@ -178,7 +178,7 @@
         return AMEDIA_ERROR_INVALID_OBJECT;
     }
 
-    if ((mUsage0 & AHARDWAREBUFFER_USAGE0_CPU_READ_OFTEN) == 0) {
+    if ((mUsage0 & AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN) == 0) {
         ALOGE("%s: AImage %p does not have any software read usage bits set, usage=%" PRIu64 "",
               __FUNCTION__, this, mUsage0);
         return AMEDIA_IMGREADER_CANNOT_LOCK_IMAGE;
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index c449611..7e41d28 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -587,7 +587,7 @@
         /*out*/AImageReader** reader) {
     ALOGV("%s", __FUNCTION__);
     return AImageReader_newWithUsage(
-            width, height, format, AHARDWAREBUFFER_USAGE0_CPU_READ_OFTEN, 0, maxImages, reader);
+            width, height, format, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, 0, maxImages, reader);
 }
 
 EXPORT
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 3665875..8d018d1 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -8296,22 +8296,24 @@
             mEffectChains[0]->setVolume_l(&vol, &vol);
             volume = (float)vol / (1 << 24);
         }
-
-        mOutput->stream->setVolume(volume, volume);
-
-        sp<MmapStreamCallback> callback = mCallback.promote();
-        if (callback != 0) {
-            int channelCount;
-            if (isOutput()) {
-                channelCount = audio_channel_count_from_out_mask(mChannelMask);
+        // Try to use HW volume control and fall back to SW control if not implemented
+        if (mOutput->stream->setVolume(volume, volume) != NO_ERROR) {
+            sp<MmapStreamCallback> callback = mCallback.promote();
+            if (callback != 0) {
+                int channelCount;
+                if (isOutput()) {
+                    channelCount = audio_channel_count_from_out_mask(mChannelMask);
+                } else {
+                    channelCount = audio_channel_count_from_in_mask(mChannelMask);
+                }
+                Vector<float> values;
+                for (int i = 0; i < channelCount; i++) {
+                    values.add(volume);
+                }
+                callback->onVolumeChanged(mChannelMask, values);
             } else {
-                channelCount = audio_channel_count_from_in_mask(mChannelMask);
+                ALOGW("Could not set MMAP stream volume: no volume callback!");
             }
-            Vector<float> values;
-            for (int i = 0; i < channelCount; i++) {
-                values.add(volume);
-            }
-            callback->onVolumeChanged(mChannelMask, values);
         }
     }
 }
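
The MMAP thread now asks the HAL stream to apply the volume and only falls back to the per-channel software callback when the HAL reports that it does not implement volume control. A small sketch of that fallback, with invented function names rather than the AudioFlinger types:

    #include <functional>
    #include <iostream>
    #include <vector>

    // Try hardware volume first; on failure, expand to one value per channel
    // and hand the vector to the software volume callback.
    void applyVolume(float volume, int channelCount,
                     const std::function<bool(float)> &hwSetVolume,
                     const std::function<void(const std::vector<float> &)> &swCallback) {
        if (hwSetVolume(volume)) return;                   // HAL handled it
        std::vector<float> values(channelCount, volume);   // same gain per channel
        swCallback(values);
    }

    int main() {
        applyVolume(0.5f, 2 /* stereo */,
                    [](float) { return false; },           // HAL: not implemented
                    [](const std::vector<float> &v) {
                        std::cout << "SW volume on " << v.size() << " channels\n";
                    });
    }
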
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 733a78e..6e21126 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -41,11 +41,7 @@
     {
         SharedParameters::Lock l(client->getParameters());
 
-        if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
-            mUsePartialResult = (mNumPartialResults > 1);
-        } else {
-            mUsePartialResult = l.mParameters.quirks.partialResults;
-        }
+        mUsePartialResult = (mNumPartialResults > 1);
 
         // Initialize starting 3A state
         m3aState.afTriggerId = l.mParameters.afTriggerCounter;
@@ -76,16 +72,7 @@
 
     bool isPartialResult = false;
     if (mUsePartialResult) {
-        if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
-            isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
-        } else {
-            camera_metadata_entry_t entry;
-            entry = frame.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
-            if (entry.count > 0 &&
-                    entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
-                isPartialResult = true;
-            }
-        }
+        isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
     }
 
     if (!isPartialResult && processFaceDetect(frame.mMetadata, client) != OK) {
@@ -291,16 +278,8 @@
     gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
             &pendingState.awbState, frameNumber, cameraId);
 
-    if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
-        pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
-        pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
-    } else {
-        gotAllStates &= updatePendingState<int32_t>(metadata,
-                ANDROID_CONTROL_AF_TRIGGER_ID, &pendingState.afTriggerId, frameNumber, cameraId);
-
-        gotAllStates &= updatePendingState<int32_t>(metadata,
-            ANDROID_CONTROL_AE_PRECAPTURE_ID, &pendingState.aeTriggerId, frameNumber, cameraId);
-    }
+    pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
+    pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
 
     if (!gotAllStates) {
         // If not all states are received, put the pending state to mPending3AStates.
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 1c78a08..a305bc7 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -2854,32 +2854,14 @@
     }
     sizes->clear();
 
-    if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
-        Vector<StreamConfiguration> scs = getStreamConfigurations();
-        for (size_t i=0; i < scs.size(); i++) {
-            const StreamConfiguration &sc = scs[i];
-            if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
-                    sc.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
-                    sc.width <= limit.width && sc.height <= limit.height) {
-                Size sz = {sc.width, sc.height};
-                sizes->push(sz);
-            }
-        }
-    } else {
-        const size_t SIZE_COUNT = sizeof(Size) / sizeof(int);
-        camera_metadata_ro_entry_t availableProcessedSizes =
-            staticInfo(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, SIZE_COUNT);
-        if (availableProcessedSizes.count < SIZE_COUNT) return BAD_VALUE;
-
-        Size filteredSize;
-        for (size_t i = 0; i < availableProcessedSizes.count; i += SIZE_COUNT) {
-            filteredSize.width = availableProcessedSizes.data.i32[i];
-            filteredSize.height = availableProcessedSizes.data.i32[i+1];
-                // Need skip the preview sizes that are too large.
-                if (filteredSize.width <= limit.width &&
-                        filteredSize.height <= limit.height) {
-                    sizes->push(filteredSize);
-                }
+    Vector<StreamConfiguration> scs = getStreamConfigurations();
+    for (size_t i=0; i < scs.size(); i++) {
+        const StreamConfiguration &sc = scs[i];
+        if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+                sc.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
+                sc.width <= limit.width && sc.height <= limit.height) {
+            Size sz = {sc.width, sc.height};
+            sizes->push(sz);
         }
     }
 
@@ -2934,10 +2916,6 @@
     const int STREAM_HEIGHT_OFFSET = 2;
     const int STREAM_IS_INPUT_OFFSET = 3;
     Vector<StreamConfiguration> scs;
-    if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
-        ALOGE("StreamConfiguration is only valid after device HAL 3.2!");
-        return scs;
-    }
 
     camera_metadata_ro_entry_t availableStreamConfigs =
                 staticInfo(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
@@ -2953,37 +2931,10 @@
 }
 
 int64_t Parameters::getJpegStreamMinFrameDurationNs(Parameters::Size size) {
-    if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
-        return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_BLOB);
-    } else {
-        Vector<Size> availableJpegSizes = getAvailableJpegSizes();
-        size_t streamIdx = availableJpegSizes.size();
-        for (size_t i = 0; i < availableJpegSizes.size(); i++) {
-            if (availableJpegSizes[i].width == size.width &&
-                    availableJpegSizes[i].height == size.height) {
-                streamIdx = i;
-                break;
-            }
-        }
-        if (streamIdx != availableJpegSizes.size()) {
-            camera_metadata_ro_entry_t jpegMinDurations =
-                    staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS);
-            if (streamIdx < jpegMinDurations.count) {
-                return jpegMinDurations.data.i64[streamIdx];
-            }
-        }
-    }
-    ALOGE("%s: cannot find min frame duration for jpeg size %dx%d",
-            __FUNCTION__, size.width, size.height);
-    return -1;
+    return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_BLOB);
 }
 
 int64_t Parameters::getMinFrameDurationNs(Parameters::Size size, int fmt) {
-    if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
-        ALOGE("Min frame duration for HAL 3.1 or lower is not supported");
-        return -1;
-    }
-
     const int STREAM_DURATION_SIZE = 4;
     const int STREAM_FORMAT_OFFSET = 0;
     const int STREAM_WIDTH_OFFSET = 1;
@@ -3005,11 +2956,6 @@
 }
 
 bool Parameters::isFpsSupported(const Vector<Size> &sizes, int format, int32_t fps) {
-    // Skip the check for older HAL version, as the min duration is not supported.
-    if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
-        return true;
-    }
-
     // Get min frame duration for each size and check if the given fps range can be supported.
     for (size_t i = 0 ; i < sizes.size(); i++) {
         int64_t minFrameDuration = getMinFrameDurationNs(sizes[i], format);
@@ -3030,48 +2976,29 @@
 
 SortedVector<int32_t> Parameters::getAvailableOutputFormats() {
     SortedVector<int32_t> outputFormats; // Non-duplicated output formats
-    if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
-        Vector<StreamConfiguration> scs = getStreamConfigurations();
-        for (size_t i = 0; i < scs.size(); i++) {
-            const StreamConfiguration &sc = scs[i];
-            if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
-                outputFormats.add(sc.format);
-            }
-        }
-    } else {
-        camera_metadata_ro_entry_t availableFormats = staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS);
-        for (size_t i = 0; i < availableFormats.count; i++) {
-            outputFormats.add(availableFormats.data.i32[i]);
+    Vector<StreamConfiguration> scs = getStreamConfigurations();
+    for (size_t i = 0; i < scs.size(); i++) {
+        const StreamConfiguration &sc = scs[i];
+        if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
+            outputFormats.add(sc.format);
         }
     }
+
     return outputFormats;
 }
 
 Vector<Parameters::Size> Parameters::getAvailableJpegSizes() {
     Vector<Parameters::Size> jpegSizes;
-    if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
-        Vector<StreamConfiguration> scs = getStreamConfigurations();
-        for (size_t i = 0; i < scs.size(); i++) {
-            const StreamConfiguration &sc = scs[i];
-            if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
-                    sc.format == HAL_PIXEL_FORMAT_BLOB) {
-                Size sz = {sc.width, sc.height};
-                jpegSizes.add(sz);
-            }
-        }
-    } else {
-        const int JPEG_SIZE_ENTRY_COUNT = 2;
-        const int WIDTH_OFFSET = 0;
-        const int HEIGHT_OFFSET = 1;
-        camera_metadata_ro_entry_t availableJpegSizes =
-            staticInfo(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
-        for (size_t i = 0; i < availableJpegSizes.count; i+= JPEG_SIZE_ENTRY_COUNT) {
-            int width = availableJpegSizes.data.i32[i + WIDTH_OFFSET];
-            int height = availableJpegSizes.data.i32[i + HEIGHT_OFFSET];
-            Size sz = {width, height};
+    Vector<StreamConfiguration> scs = getStreamConfigurations();
+    for (size_t i = 0; i < scs.size(); i++) {
+        const StreamConfiguration &sc = scs[i];
+        if (sc.isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+                sc.format == HAL_PIXEL_FORMAT_BLOB) {
+            Size sz = {sc.width, sc.height};
             jpegSizes.add(sz);
         }
     }
+
     return jpegSizes;
 }
 
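
With the pre-HAL3.2 branches removed, every size and format query in Parameters.cpp goes through ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, a flat int32 array of (format, width, height, isInput) quadruples. A hedged sketch of decoding such a flat table (standalone C++; the values in main() are illustrative):

    #include <cstddef>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Layout of the stream-configuration entry: four int32 values per config.
    struct StreamConfiguration { int32_t format, width, height, isInput; };

    std::vector<StreamConfiguration> parseConfigs(const std::vector<int32_t> &raw) {
        constexpr std::size_t kEntrySize = 4;
        std::vector<StreamConfiguration> out;
        if (raw.size() % kEntrySize != 0) return out;       // malformed entry
        for (std::size_t i = 0; i < raw.size(); i += kEntrySize) {
            out.push_back({raw[i], raw[i + 1], raw[i + 2], raw[i + 3]});
        }
        return out;
    }

    int main() {
        // One BLOB (JPEG, 0x21) output configuration of 4032x3024; here 0 in
        // the last slot marks an output stream.
        std::vector<int32_t> raw{0x21, 4032, 3024, 0};
        for (const auto &sc : parseConfigs(raw)) {
            std::cout << "format=" << sc.format << " " << sc.width << "x"
                      << sc.height << (sc.isInput ? " (input)\n" : " (output)\n");
        }
    }
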
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index bf92a2b..d79e430 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -114,16 +114,11 @@
         }
 
         // Use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG for ZSL streaming case.
-        if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_0) {
-            if (params.useZeroShutterLag() && !params.recordingHint) {
-                res = device->createDefaultRequest(CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG,
-                        &mPreviewRequest);
-            } else {
-                res = device->createDefaultRequest(CAMERA3_TEMPLATE_PREVIEW,
-                        &mPreviewRequest);
-            }
+        if (params.useZeroShutterLag() && !params.recordingHint) {
+            res = device->createDefaultRequest(
+                    CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG, &mPreviewRequest);
         } else {
-            res = device->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
+            res = device->createDefaultRequest(CAMERA3_TEMPLATE_PREVIEW,
                     &mPreviewRequest);
         }
 
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 4f788ae..44540bb 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -307,11 +307,6 @@
     virtual status_t prepare(int maxCount, int streamId) = 0;
 
     /**
-     * Get the HAL device version.
-     */
-    virtual uint32_t getDeviceVersion() = 0;
-
-    /**
      * Set the deferred consumer surface and finish the rest of the stream configuration.
      */
     virtual status_t setConsumerSurfaces(int streamId,
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index 9e78f88..41c953b 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -32,8 +32,7 @@
     mDevice(device),
     mNumPartialResults(1) {
     sp<CameraDeviceBase> cameraDevice = device.promote();
-    if (cameraDevice != 0 &&
-            cameraDevice->getDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
+    if (cameraDevice != 0) {
         CameraMetadata staticInfo = cameraDevice->info();
         camera_metadata_entry_t entry = staticInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
         if (entry.count > 0) {
@@ -171,18 +170,8 @@
     camera_metadata_ro_entry_t entry;
 
     // Check if this result is partial.
-    bool isPartialResult = false;
-    if (device->getDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
-        isPartialResult = result.mResultExtras.partialResultCount < mNumPartialResults;
-    } else {
-        entry = result.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
-        if (entry.count != 0 &&
-                entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
-            ALOGV("%s: Camera %s: This is a partial result",
-                    __FUNCTION__, device->getId().string());
-            isPartialResult = true;
-        }
-    }
+    bool isPartialResult =
+            result.mResultExtras.partialResultCount < mNumPartialResults;
 
     // TODO: instead of getting requestID from CameraMetadata, we should get it
     // from CaptureResultExtras. This will require changing Camera2Device.
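
Partial-result detection is now uniform: a capture result is partial when its partialResultCount is below the ANDROID_REQUEST_PARTIAL_RESULT_COUNT advertised in static metadata, with a count of 1 meaning the HAL sends no partials. A tiny sketch of the check (standalone C++):

    #include <cstdint>
    #include <iostream>

    // numPartialResults comes from static metadata (1 means no partials);
    // partialResultCount is carried by each capture result.
    bool isPartialResult(int32_t partialResultCount, int32_t numPartialResults) {
        bool usePartialResult = numPartialResults > 1;
        return usePartialResult && partialResultCount < numPartialResults;
    }

    int main() {
        std::cout << isPartialResult(1, 3) << "\n";  // 1: partial (1 of 3)
        std::cout << isPartialResult(3, 3) << "\n";  // 0: the final result
        std::cout << isPartialResult(1, 1) << "\n";  // 0: HAL sends no partials
    }
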
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index b64488c..fb303bf 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -163,7 +163,6 @@
 
     /** Everything is good to go */
 
-    mDeviceVersion = device->common.version;
     mDeviceInfo = info.static_camera_characteristics;
     mInterface = std::make_unique<HalInterface>(device);
 
@@ -231,9 +230,6 @@
         // Don't use the queue onwards.
     }
 
-    // TODO: camera service will absorb 3_2/3_3/3_4 differences in the future
-    //       for now use 3_4 to keep legacy devices working
-    mDeviceVersion = CAMERA_DEVICE_API_VERSION_3_4;
     mInterface = std::make_unique<HalInterface>(session, queue);
     std::string providerType;
     mVendorTagId = manager->getProviderTagIdLocked(mId.string());
@@ -262,17 +258,8 @@
 
     mTagMonitor.initialize(mVendorTagId);
 
-    bool aeLockAvailable = false;
-    camera_metadata_entry aeLockAvailableEntry = mDeviceInfo.find(
-            ANDROID_CONTROL_AE_LOCK_AVAILABLE);
-    if (aeLockAvailableEntry.count > 0) {
-        aeLockAvailable = (aeLockAvailableEntry.data.u8[0] ==
-                ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE);
-    }
-
     /** Start up request queue thread */
-    mRequestThread = new RequestThread(this, mStatusTracker, mInterface.get(), mDeviceVersion,
-            aeLockAvailable);
+    mRequestThread = new RequestThread(this, mStatusTracker, mInterface.get());
     res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
     if (res != OK) {
         SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -284,13 +271,6 @@
 
     mPreparerThread = new PreparerThread();
 
-    // Determine whether we need to derive sensitivity boost values for older devices.
-    // If post-RAW sensitivity boost range is listed, so should post-raw sensitivity control
-    // be listed (as the default value 100)
-    if (mDeviceInfo.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE)) {
-        mDerivePostRawSensKey = true;
-    }
-
     internalUpdateStatusLocked(STATUS_UNCONFIGURED);
     mNextStreamId = 0;
     mDummyStreamId = NO_STREAM;
@@ -306,19 +286,11 @@
     }
 
     // Will the HAL be sending in early partial result metadata?
-    if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
-        camera_metadata_entry partialResultsCount =
-                mDeviceInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
-        if (partialResultsCount.count > 0) {
-            mNumPartialResults = partialResultsCount.data.i32[0];
-            mUsePartialResult = (mNumPartialResults > 1);
-        }
-    } else {
-        camera_metadata_entry partialResultsQuirk =
-                mDeviceInfo.find(ANDROID_QUIRKS_USE_PARTIAL_RESULT);
-        if (partialResultsQuirk.count > 0 && partialResultsQuirk.data.u8[0] == 1) {
-            mUsePartialResult = true;
-        }
+    camera_metadata_entry partialResultsCount =
+            mDeviceInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
+    if (partialResultsCount.count > 0) {
+        mNumPartialResults = partialResultsCount.data.i32[0];
+        mUsePartialResult = (mNumPartialResults > 1);
     }
 
     camera_metadata_entry configs =
@@ -434,48 +406,32 @@
 
 Camera3Device::Size Camera3Device::getMaxJpegResolution() const {
     int32_t maxJpegWidth = 0, maxJpegHeight = 0;
-    if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
-        const int STREAM_CONFIGURATION_SIZE = 4;
-        const int STREAM_FORMAT_OFFSET = 0;
-        const int STREAM_WIDTH_OFFSET = 1;
-        const int STREAM_HEIGHT_OFFSET = 2;
-        const int STREAM_IS_INPUT_OFFSET = 3;
-        camera_metadata_ro_entry_t availableStreamConfigs =
-                mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
-        if (availableStreamConfigs.count == 0 ||
-                availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
-            return Size(0, 0);
-        }
+    const int STREAM_CONFIGURATION_SIZE = 4;
+    const int STREAM_FORMAT_OFFSET = 0;
+    const int STREAM_WIDTH_OFFSET = 1;
+    const int STREAM_HEIGHT_OFFSET = 2;
+    const int STREAM_IS_INPUT_OFFSET = 3;
+    camera_metadata_ro_entry_t availableStreamConfigs =
+            mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+    if (availableStreamConfigs.count == 0 ||
+            availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
+        return Size(0, 0);
+    }
 
-        // Get max jpeg size (area-wise).
-        for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
-            int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
-            int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
-            int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
-            int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
-            if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
-                    && format == HAL_PIXEL_FORMAT_BLOB &&
-                    (width * height > maxJpegWidth * maxJpegHeight)) {
-                maxJpegWidth = width;
-                maxJpegHeight = height;
-            }
-        }
-    } else {
-        camera_metadata_ro_entry availableJpegSizes =
-                mDeviceInfo.find(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
-        if (availableJpegSizes.count == 0 || availableJpegSizes.count % 2 != 0) {
-            return Size(0, 0);
-        }
-
-        // Get max jpeg size (area-wise).
-        for (size_t i = 0; i < availableJpegSizes.count; i += 2) {
-            if ((availableJpegSizes.data.i32[i] * availableJpegSizes.data.i32[i + 1])
-                    > (maxJpegWidth * maxJpegHeight)) {
-                maxJpegWidth = availableJpegSizes.data.i32[i];
-                maxJpegHeight = availableJpegSizes.data.i32[i + 1];
-            }
+    // Get max jpeg size (area-wise).
+    for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
+        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
+        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
+        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
+        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
+        if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
+                && format == HAL_PIXEL_FORMAT_BLOB &&
+                (width * height > maxJpegWidth * maxJpegHeight)) {
+            maxJpegWidth = width;
+            maxJpegHeight = height;
         }
     }
+
     return Size(maxJpegWidth, maxJpegHeight);
 }
 
@@ -497,31 +453,6 @@
     return measured;
 }
 
-/**
- * Map Android N dataspace definitions back to Android M definitions, for
- * use with HALv3.3 or older.
- *
- * Only map where correspondences exist, and otherwise preserve the value.
- */
-android_dataspace Camera3Device::mapToLegacyDataspace(android_dataspace dataSpace) {
-    switch (dataSpace) {
-        case HAL_DATASPACE_V0_SRGB_LINEAR:
-            return HAL_DATASPACE_SRGB_LINEAR;
-        case HAL_DATASPACE_V0_SRGB:
-            return HAL_DATASPACE_SRGB;
-        case HAL_DATASPACE_V0_JFIF:
-            return HAL_DATASPACE_JFIF;
-        case HAL_DATASPACE_V0_BT601_625:
-            return HAL_DATASPACE_BT601_625;
-        case HAL_DATASPACE_V0_BT601_525:
-            return HAL_DATASPACE_BT601_525;
-        case HAL_DATASPACE_V0_BT709:
-            return HAL_DATASPACE_BT709;
-        default:
-            return dataSpace;
-    }
-}
-
 hardware::graphics::common::V1_0::PixelFormat Camera3Device::mapToPixelFormat(
         int frameworkFormat) {
     return (hardware::graphics::common::V1_0::PixelFormat) frameworkFormat;
@@ -1365,32 +1296,17 @@
     assert(mStatus != STATUS_ACTIVE);
 
     sp<Camera3OutputStream> newStream;
-    // Overwrite stream set id to invalid for HAL3.2 or lower, as buffer manager does support
-    // such devices.
-    if (mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_2) {
-        streamSetId = CAMERA3_STREAM_SET_ID_INVALID;
-    }
 
     if (consumers.size() == 0 && !hasDeferredConsumer) {
         ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
         return BAD_VALUE;
     }
-    // HAL3.1 doesn't support deferred consumer stream creation as it requires buffer registration
-    // which requires a consumer surface to be available.
-    if (hasDeferredConsumer && mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
-        ALOGE("HAL3.1 doesn't support deferred consumer stream creation");
-        return BAD_VALUE;
-    }
 
     if (hasDeferredConsumer && format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
         ALOGE("Deferred consumer stream creation only support IMPLEMENTATION_DEFINED format");
         return BAD_VALUE;
     }
 
-    // Use legacy dataspace values for older HALs
-    if (mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_3) {
-        dataSpace = mapToLegacyDataspace(dataSpace);
-    }
     if (format == HAL_PIXEL_FORMAT_BLOB) {
         ssize_t blobBufferSize;
         if (dataSpace != HAL_DATASPACE_DEPTH) {
@@ -1433,15 +1349,7 @@
     }
     newStream->setStatusTracker(mStatusTracker);
 
-    /**
-     * Camera3 Buffer manager is only supported by HAL3.3 onwards, as the older HALs (< HAL3.2)
-     * require buffers to be statically allocated for internal static buffer registration, while
-     * the buffers provided by buffer manager are really dynamically allocated. For HAL3.2, because
-     * not all HAL implementations support dynamic buffer registration, exclude it as well.
-     */
-    if (mDeviceVersion > CAMERA_DEVICE_API_VERSION_3_2) {
-        newStream->setBufferManager(mBufferManager);
-    }
+    newStream->setBufferManager(mBufferManager);
 
     res = mOutputStreams.add(mNextStreamId, newStream);
     if (res < 0) {
@@ -1659,15 +1567,6 @@
     set_camera_metadata_vendor_id(rawRequest, mVendorTagId);
     mRequestTemplateCache[templateId].acquire(rawRequest);
 
-    // Derive some new keys for backward compatibility
-    if (mDerivePostRawSensKey && !mRequestTemplateCache[templateId].exists(
-            ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
-        int32_t defaultBoost[1] = {100};
-        mRequestTemplateCache[templateId].update(
-                ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
-                defaultBoost, 1);
-    }
-
     *request = mRequestTemplateCache[templateId];
     return OK;
 }
@@ -1919,15 +1818,7 @@
         mRequestThread->clear(/*out*/frameNumber);
     }
 
-    status_t res;
-    if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_1) {
-        res = mRequestThread->flush();
-    } else {
-        Mutex::Autolock l(mLock);
-        res = waitUntilDrainedLocked();
-    }
-
-    return res;
+    return mRequestThread->flush();
 }
 
 status_t Camera3Device::prepare(int streamId) {
@@ -1968,14 +1859,6 @@
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
-    // Teardown can only be accomplished on devices that don't require register_stream_buffers,
-    // since we cannot call register_stream_buffers except right after configure_streams.
-    if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
-        ALOGE("%s: Unable to tear down streams on device HAL v%x",
-                __FUNCTION__, mDeviceVersion);
-        return NO_INIT;
-    }
-
     sp<Camera3StreamInterface> stream;
     ssize_t outputStreamIdx = mOutputStreams.indexOfKey(streamId);
     if (outputStreamIdx == NAME_NOT_FOUND) {
@@ -2013,12 +1896,6 @@
     return OK;
 }
 
-uint32_t Camera3Device::getDeviceVersion() {
-    ATRACE_CALL();
-    Mutex::Autolock il(mInterfaceLock);
-    return mDeviceVersion;
-}
-
 /**
  * Methods called by subclasses
  */
@@ -2511,14 +2388,13 @@
 
 status_t Camera3Device::registerInFlight(uint32_t frameNumber,
         int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
-        const AeTriggerCancelOverride_t &aeTriggerCancelOverride,
         bool hasAppCallback) {
     ATRACE_CALL();
     Mutex::Autolock l(mInFlightLock);
 
     ssize_t res;
     res = mInFlightMap.add(frameNumber, InFlightRequest(numBuffers, resultExtras, hasInput,
-            aeTriggerCancelOverride, hasAppCallback));
+            hasAppCallback));
     if (res < 0) return res;
 
     if (mInFlightMap.size() == 1) {
@@ -2603,8 +2479,8 @@
     }
 }
 
-void Camera3Device::insertResultLocked(CaptureResult *result, uint32_t frameNumber,
-            const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+void Camera3Device::insertResultLocked(CaptureResult *result,
+        uint32_t frameNumber) {
     if (result == nullptr) return;
 
     camera_metadata_t *meta = const_cast<camera_metadata_t *>(
@@ -2623,8 +2499,6 @@
         return;
     }
 
-    overrideResultForPrecaptureCancel(&result->mMetadata, aeTriggerCancelOverride);
-
     // Valid result, insert into queue
     List<CaptureResult>::iterator queuedResult =
             mResultQueue.insert(mResultQueue.end(), CaptureResult(*result));
@@ -2639,15 +2513,14 @@
 
 
 void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialResult,
-        const CaptureResultExtras &resultExtras, uint32_t frameNumber,
-        const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+        const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
     Mutex::Autolock l(mOutputLock);
 
     CaptureResult captureResult;
     captureResult.mResultExtras = resultExtras;
     captureResult.mMetadata = partialResult;
 
-    insertResultLocked(&captureResult, frameNumber, aeTriggerCancelOverride);
+    insertResultLocked(&captureResult, frameNumber);
 }
 
 
@@ -2655,8 +2528,7 @@
         CaptureResultExtras &resultExtras,
         CameraMetadata &collectedPartialResult,
         uint32_t frameNumber,
-        bool reprocess,
-        const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+        bool reprocess) {
     if (pendingMetadata.isEmpty())
         return;
 
@@ -2690,15 +2562,6 @@
         captureResult.mMetadata.append(collectedPartialResult);
     }
 
-    // Derive some new keys for backward compatibility
-    if (mDerivePostRawSensKey && !captureResult.mMetadata.exists(
-            ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
-        int32_t defaultBoost[1] = {100};
-        captureResult.mMetadata.update(
-                ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
-                defaultBoost, 1);
-    }
-
     captureResult.mMetadata.sort();
 
     // Check that there's a timestamp in the result metadata
@@ -2712,7 +2575,7 @@
     mTagMonitor.monitorMetadata(TagMonitor::RESULT,
             frameNumber, timestamp.data.i64[0], captureResult.mMetadata);
 
-    insertResultLocked(&captureResult, frameNumber, aeTriggerCancelOverride);
+    insertResultLocked(&captureResult, frameNumber);
 }
 
 /**
@@ -2732,10 +2595,7 @@
         return;
     }
 
-    // For HAL3.2 or above, If HAL doesn't support partial, it must always set
-    // partial_result to 1 when metadata is included in this result.
     if (!mUsePartialResult &&
-            mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2 &&
             result->result != NULL &&
             result->partial_result != 1) {
         SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
@@ -2780,39 +2640,21 @@
 
         // Check if this result carries only partial metadata
         if (mUsePartialResult && result->result != NULL) {
-            if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
-                if (result->partial_result > mNumPartialResults || result->partial_result < 1) {
-                    SET_ERR("Result is malformed for frame %d: partial_result %u must be  in"
-                            " the range of [1, %d] when metadata is included in the result",
-                            frameNumber, result->partial_result, mNumPartialResults);
-                    return;
-                }
-                isPartialResult = (result->partial_result < mNumPartialResults);
-                if (isPartialResult) {
-                    request.collectedPartialResult.append(result->result);
-                }
-            } else {
-                camera_metadata_ro_entry_t partialResultEntry;
-                res = find_camera_metadata_ro_entry(result->result,
-                        ANDROID_QUIRKS_PARTIAL_RESULT, &partialResultEntry);
-                if (res != NAME_NOT_FOUND &&
-                        partialResultEntry.count > 0 &&
-                        partialResultEntry.data.u8[0] ==
-                        ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
-                    // A partial result. Flag this as such, and collect this
-                    // set of metadata into the in-flight entry.
-                    isPartialResult = true;
-                    request.collectedPartialResult.append(
-                        result->result);
-                    request.collectedPartialResult.erase(
-                        ANDROID_QUIRKS_PARTIAL_RESULT);
-                }
+            if (result->partial_result > mNumPartialResults || result->partial_result < 1) {
+                SET_ERR("Result is malformed for frame %d: partial_result %u must be  in"
+                        " the range of [1, %d] when metadata is included in the result",
+                        frameNumber, result->partial_result, mNumPartialResults);
+                return;
+            }
+            isPartialResult = (result->partial_result < mNumPartialResults);
+            if (isPartialResult) {
+                request.collectedPartialResult.append(result->result);
             }
 
             if (isPartialResult && request.hasCallback) {
                 // Send partial capture result
-                sendPartialCaptureResult(result->result, request.resultExtras, frameNumber,
-                        request.aeTriggerCancelOverride);
+                sendPartialCaptureResult(result->result, request.resultExtras,
+                        frameNumber);
             }
         }
 
@@ -2877,8 +2719,8 @@
                 CameraMetadata metadata;
                 metadata = result->result;
                 sendCaptureResult(metadata, request.resultExtras,
-                    collectedPartialResult, frameNumber, hasInputBufferInRequest,
-                    request.aeTriggerCancelOverride);
+                    collectedPartialResult, frameNumber,
+                    hasInputBufferInRequest);
             }
         }
 
@@ -3058,7 +2900,7 @@
                 // send pending result and buffers
                 sendCaptureResult(r.pendingMetadata, r.resultExtras,
                     r.collectedPartialResult, msg.frame_number,
-                    r.hasInputBuffer, r.aeTriggerCancelOverride);
+                    r.hasInputBuffer);
             }
             returnOutputBuffers(r.pendingOutputBuffers.array(),
                 r.pendingOutputBuffers.size(), r.shutterTimestamp);
@@ -3606,14 +3448,11 @@
 
 Camera3Device::RequestThread::RequestThread(wp<Camera3Device> parent,
         sp<StatusTracker> statusTracker,
-        HalInterface* interface,
-        uint32_t deviceVersion,
-        bool aeLockAvailable) :
+        HalInterface* interface) :
         Thread(/*canCallJava*/false),
         mParent(parent),
         mStatusTracker(statusTracker),
         mInterface(interface),
-        mDeviceVersion(deviceVersion),
         mListener(nullptr),
         mId(getId(parent)),
         mReconfigured(false),
@@ -3625,7 +3464,6 @@
         mCurrentPreCaptureTriggerId(0),
         mRepeatingLastFrameNumber(
             hardware::camera2::ICameraDeviceUser::NO_IN_FLIGHT_REPEATING_FRAMES),
-        mAeLockAvailable(aeLockAvailable),
         mPrepareVideoStream(false) {
     mStatusId = statusTracker->addComponent();
 }
@@ -3820,11 +3658,7 @@
     ATRACE_CALL();
     Mutex::Autolock l(mFlushLock);
 
-    if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_1) {
-        return mInterface->flush();
-    }
-
-    return -ENOTSUP;
+    return mInterface->flush();
 }
 
 void Camera3Device::RequestThread::setPaused(bool paused) {
@@ -3857,65 +3691,6 @@
     mRequestSignal.signal();
 }
 
-
-/**
- * For devices <= CAMERA_DEVICE_API_VERSION_3_2, AE_PRECAPTURE_TRIGGER_CANCEL is not supported so
- * we need to override AE_PRECAPTURE_TRIGGER_CANCEL to AE_PRECAPTURE_TRIGGER_IDLE and AE_LOCK_OFF
- * to AE_LOCK_ON to start cancelling AE precapture. If AE lock is not available, it still overrides
- * AE_PRECAPTURE_TRIGGER_CANCEL to AE_PRECAPTURE_TRIGGER_IDLE but doesn't add AE_LOCK_ON to the
- * request.
- */
-void Camera3Device::RequestThread::handleAePrecaptureCancelRequest(const sp<CaptureRequest>& request) {
-    request->mAeTriggerCancelOverride.applyAeLock = false;
-    request->mAeTriggerCancelOverride.applyAePrecaptureTrigger = false;
-
-    if (mDeviceVersion > CAMERA_DEVICE_API_VERSION_3_2) {
-        return;
-    }
-
-    camera_metadata_entry_t aePrecaptureTrigger =
-            request->mSettings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
-    if (aePrecaptureTrigger.count > 0 &&
-            aePrecaptureTrigger.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL) {
-        // Always override CANCEL to IDLE
-        uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
-        request->mSettings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
-        request->mAeTriggerCancelOverride.applyAePrecaptureTrigger = true;
-        request->mAeTriggerCancelOverride.aePrecaptureTrigger =
-                ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL;
-
-        if (mAeLockAvailable == true) {
-            camera_metadata_entry_t aeLock = request->mSettings.find(ANDROID_CONTROL_AE_LOCK);
-            if (aeLock.count == 0 ||  aeLock.data.u8[0] == ANDROID_CONTROL_AE_LOCK_OFF) {
-                uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_ON;
-                request->mSettings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
-                request->mAeTriggerCancelOverride.applyAeLock = true;
-                request->mAeTriggerCancelOverride.aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
-            }
-        }
-    }
-}
-
-/**
- * Override result metadata for cancelling AE precapture trigger applied in
- * handleAePrecaptureCancelRequest().
- */
-void Camera3Device::overrideResultForPrecaptureCancel(
-        CameraMetadata *result, const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
-    if (aeTriggerCancelOverride.applyAeLock) {
-        // Only devices <= v3.2 should have this override
-        assert(mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_2);
-        result->update(ANDROID_CONTROL_AE_LOCK, &aeTriggerCancelOverride.aeLock, 1);
-    }
-
-    if (aeTriggerCancelOverride.applyAePrecaptureTrigger) {
-        // Only devices <= v3.2 should have this override
-        assert(mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_2);
-        result->update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
-                &aeTriggerCancelOverride.aePrecaptureTrigger, 1);
-    }
-}
-
 void Camera3Device::RequestThread::checkAndStopRepeatingRequest() {
     bool surfaceAbandoned = false;
     int64_t lastFrameNumber = 0;
@@ -4283,7 +4058,6 @@
         res = parent->registerInFlight(halRequest->frame_number,
                 totalNumBuffers, captureRequest->mResultExtras,
                 /*hasInput*/halRequest->input_buffer != NULL,
-                captureRequest->mAeTriggerCancelOverride,
                 hasCallback);
         ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
                ", burstId = %" PRId32 ".",
@@ -4542,8 +4316,6 @@
         }
     }
 
-    handleAePrecaptureCancelRequest(nextRequest);
-
     return nextRequest;
 }
 
@@ -4629,9 +4401,7 @@
                 request->mResultExtras.afTriggerId = triggerId;
                 mCurrentAfTriggerId = triggerId;
             }
-            if (parent->mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_2) {
-                continue; // Trigger ID tag is deprecated since device HAL 3.2
-            }
+            continue;
         }
 
         camera_metadata_entry entry = metadata.find(tag);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 8b76a97..c9876a6 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -166,8 +166,6 @@
 
     status_t prepare(int maxCount, int streamId) override;
 
-    uint32_t getDeviceVersion() override;
-
     ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const override;
     ssize_t getPointCloudBufferSize() const;
     ssize_t getRawOpaqueBufferSize(int32_t width, int32_t height) const;
@@ -342,12 +340,6 @@
 
     CameraMetadata             mRequestTemplateCache[CAMERA3_TEMPLATE_COUNT];
 
-    uint32_t                   mDeviceVersion;
-
-    // whether Camera3Device should derive ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST for
-    // backward compatibility. Should not be changed after initialization.
-    bool                       mDerivePostRawSensKey = false;
-
     struct Size {
         uint32_t width;
         uint32_t height;
@@ -405,13 +397,6 @@
     // words, camera device shouldn't be open during CPU suspend.
     nsecs_t                    mTimestampOffset;
 
-    typedef struct AeTriggerCancelOverride {
-        bool applyAeLock;
-        uint8_t aeLock;
-        bool applyAePrecaptureTrigger;
-        uint8_t aePrecaptureTrigger;
-    } AeTriggerCancelOverride_t;
-
     class CaptureRequest : public LightRefBase<CaptureRequest> {
       public:
         CameraMetadata                      mSettings;
@@ -421,9 +406,6 @@
                                             mOutputStreams;
         SurfaceMap                          mOutputSurfaces;
         CaptureResultExtras                 mResultExtras;
-        // Used to cancel AE precapture trigger for devices that don't support
-        // CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
-        AeTriggerCancelOverride_t           mAeTriggerCancelOverride;
         // The number of requests that should be submitted to HAL at a time.
         // For example, if batch size is 8, this request and the following 7
         // requests will be submitted to HAL at a time. The batch size for
@@ -599,11 +581,6 @@
     static nsecs_t getMonoToBoottimeOffset();
 
     /**
-     * Helper function to map between legacy and new dataspace enums
-     */
-    static android_dataspace mapToLegacyDataspace(android_dataspace dataSpace);
-
-    /**
      * Helper functions to map between framework and HIDL values
      */
     static hardware::graphics::common::V1_0::PixelFormat mapToPixelFormat(int frameworkFormat);
@@ -648,9 +625,7 @@
 
         RequestThread(wp<Camera3Device> parent,
                 sp<camera3::StatusTracker> statusTracker,
-                HalInterface* interface,
-                uint32_t deviceVersion,
-                bool aeLockAvailable);
+                HalInterface* interface);
         ~RequestThread();
 
         void     setNotificationListener(wp<NotificationListener> listener);
@@ -784,9 +759,6 @@
         // If the input request is in mRepeatingRequests. Must be called with mRequestLock held
         bool isRepeatingRequestLocked(const sp<CaptureRequest>&);
 
-        // Handle AE precapture trigger cancel for devices <= CAMERA_DEVICE_API_VERSION_3_2.
-        void handleAePrecaptureCancelRequest(const sp<CaptureRequest>& request);
-
         // Clear repeating requests. Must be called with mRequestLock held.
         status_t clearRepeatingRequestsLocked(/*out*/ int64_t *lastFrameNumber = NULL);
 
@@ -799,7 +771,6 @@
         wp<Camera3Device>  mParent;
         wp<camera3::StatusTracker>  mStatusTracker;
         HalInterface*      mInterface;
-        uint32_t           mDeviceVersion;
 
         wp<NotificationListener> mListener;
 
@@ -849,9 +820,6 @@
 
         int64_t            mRepeatingLastFrameNumber;
 
-        // Whether the device supports AE lock
-        bool               mAeLockAvailable;
-
         // Flag indicating if we should prepare video stream for video requests.
         bool               mPrepareVideoStream;
     };
@@ -892,10 +860,6 @@
         // the shutter event.
         Vector<camera3_stream_buffer_t> pendingOutputBuffers;
 
-        // Used to cancel AE precapture trigger for devices that don't support
-        // CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
-        AeTriggerCancelOverride_t aeTriggerCancelOverride;
-
         // Whether this inflight request's shutter and result callback are to be
         // called. The policy is that if the request is the last one in the constrained
         // high speed recording request list, this flag will be true. If the request list
@@ -910,12 +874,11 @@
                 haveResultMetadata(false),
                 numBuffersLeft(0),
                 hasInputBuffer(false),
-                aeTriggerCancelOverride({false, 0, false, 0}),
                 hasCallback(true) {
         }
 
         InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
-                AeTriggerCancelOverride aeTriggerCancelOverride, bool hasAppCallback) :
+                bool hasAppCallback) :
                 shutterTimestamp(0),
                 sensorTimestamp(0),
                 requestStatus(OK),
@@ -923,7 +886,6 @@
                 numBuffersLeft(numBuffers),
                 resultExtras(extras),
                 hasInputBuffer(hasInput),
-                aeTriggerCancelOverride(aeTriggerCancelOverride),
                 hasCallback(hasAppCallback) {
         }
     };
@@ -937,14 +899,7 @@
 
     status_t registerInFlight(uint32_t frameNumber,
             int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
-            const AeTriggerCancelOverride_t &aeTriggerCancelOverride, bool callback);
-
-    /**
-     * Override result metadata for cancelling AE precapture trigger applied in
-     * handleAePrecaptureCancelRequest().
-     */
-    void overrideResultForPrecaptureCancel(CameraMetadata* result,
-            const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+            bool callback);
 
     /**
      * Tracking for idle detection
@@ -1039,21 +994,19 @@
 
     // Send a partial capture result.
     void sendPartialCaptureResult(const camera_metadata_t * partialResult,
-            const CaptureResultExtras &resultExtras, uint32_t frameNumber,
-            const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+            const CaptureResultExtras &resultExtras, uint32_t frameNumber);
 
     // Send a total capture result given the pending metadata and result extras,
     // partial results, and the frame number to the result queue.
     void sendCaptureResult(CameraMetadata &pendingMetadata,
             CaptureResultExtras &resultExtras,
             CameraMetadata &collectedPartialResult, uint32_t frameNumber,
-            bool reprocess, const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+            bool reprocess);
 
     // Insert the result to the result queue after updating frame number and overriding AE
     // trigger cancel.
     // mOutputLock must be held when calling this function.
-    void insertResultLocked(CaptureResult *result, uint32_t frameNumber,
-            const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+    void insertResultLocked(CaptureResult *result, uint32_t frameNumber);
 
     /**** Scope for mInFlightLock ****/
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 2b1a899..b45ef77 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -663,89 +663,6 @@
     }
 }
 
-status_t Camera3Stream::registerBuffersLocked(camera3_device *hal3Device) {
-    ATRACE_CALL();
-
-    /**
-     * >= CAMERA_DEVICE_API_VERSION_3_2:
-     *
-     * camera3_device_t->ops->register_stream_buffers() is not called and must
-     * be NULL.
-     */
-    if (hal3Device->common.version >= CAMERA_DEVICE_API_VERSION_3_2) {
-        ALOGV("%s: register_stream_buffers unused as of HAL3.2", __FUNCTION__);
-
-        if (hal3Device->ops->register_stream_buffers != NULL) {
-            ALOGE("%s: register_stream_buffers is deprecated in HAL3.2; "
-                    "must be set to NULL in camera3_device::ops", __FUNCTION__);
-            return INVALID_OPERATION;
-        }
-
-        return OK;
-    }
-
-    ALOGV("%s: register_stream_buffers using deprecated code path", __FUNCTION__);
-
-    status_t res;
-
-    size_t bufferCount = getBufferCountLocked();
-
-    Vector<buffer_handle_t*> buffers;
-    buffers.insertAt(/*prototype_item*/NULL, /*index*/0, bufferCount);
-
-    camera3_stream_buffer_set bufferSet = camera3_stream_buffer_set();
-    bufferSet.stream = this;
-    bufferSet.num_buffers = bufferCount;
-    bufferSet.buffers = buffers.editArray();
-
-    Vector<camera3_stream_buffer_t> streamBuffers;
-    streamBuffers.insertAt(camera3_stream_buffer_t(), /*index*/0, bufferCount);
-
-    // Register all buffers with the HAL. This means getting all the buffers
-    // from the stream, providing them to the HAL with the
-    // register_stream_buffers() method, and then returning them back to the
-    // stream in the error state, since they won't have valid data.
-    //
-    // Only registered buffers can be sent to the HAL.
-
-    uint32_t bufferIdx = 0;
-    for (; bufferIdx < bufferCount; bufferIdx++) {
-        res = getBufferLocked( &streamBuffers.editItemAt(bufferIdx) );
-        if (res != OK) {
-            ALOGE("%s: Unable to get buffer %d for registration with HAL",
-                    __FUNCTION__, bufferIdx);
-            // Skip registering, go straight to cleanup
-            break;
-        }
-
-        sp<Fence> fence = new Fence(streamBuffers[bufferIdx].acquire_fence);
-        fence->waitForever("Camera3Stream::registerBuffers");
-
-        buffers.editItemAt(bufferIdx) = streamBuffers[bufferIdx].buffer;
-    }
-    if (bufferIdx == bufferCount) {
-        // Got all buffers, register with HAL
-        ALOGV("%s: Registering %zu buffers with camera HAL",
-                __FUNCTION__, bufferCount);
-        ATRACE_BEGIN("camera3->register_stream_buffers");
-        res = hal3Device->ops->register_stream_buffers(hal3Device,
-                &bufferSet);
-        ATRACE_END();
-    }
-
-    // Return all valid buffers to stream, in ERROR state to indicate
-    // they weren't filled.
-    for (size_t i = 0; i < bufferIdx; i++) {
-        streamBuffers.editItemAt(i).release_fence = -1;
-        streamBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
-        returnBufferLocked(streamBuffers[i], 0);
-    }
-
-    mPrepared = true;
-
-    return res;
-}
-
 status_t Camera3Stream::getBufferLocked(camera3_stream_buffer *,
         const std::vector<size_t>&) {
     ALOGE("%s: This type of stream does not support output", __FUNCTION__);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 27ef86d..9cdc1b3 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -474,9 +474,6 @@
     Condition mInputBufferReturnedSignal;
     static const nsecs_t kWaitForBufferDuration = 3000000000LL; // 3000 ms
 
-    // Gets all buffers from endpoint and registers them with the HAL.
-    status_t registerBuffersLocked(camera3_device *hal3Device);
-
     void fireBufferListenersLocked(const camera3_stream_buffer& buffer,
                                   bool acquired, bool output);
     List<wp<Camera3StreamBufferListener> > mBufferListenerList;
diff --git a/services/mediacodec/main_codecservice.cpp b/services/mediacodec/main_codecservice.cpp
index c4e4cff..c59944a 100644
--- a/services/mediacodec/main_codecservice.cpp
+++ b/services/mediacodec/main_codecservice.cpp
@@ -31,9 +31,9 @@
 #include "MediaCodecService.h"
 #include "minijail.h"
 
-#include <android/hardware/media/omx/1.0/IOmx.h>
 #include <hidl/HidlTransportSupport.h>
 #include <omx/1.0/Omx.h>
+#include <omx/1.0/OmxStore.h>
 
 using namespace android;
 
@@ -61,17 +61,23 @@
 
     if (treble) {
         using namespace ::android::hardware::media::omx::V1_0;
+        sp<IOmxStore> omxStore = new implementation::OmxStore();
+        if (omxStore == nullptr) {
+            LOG(ERROR) << "Cannot create IOmxStore HAL service.";
+        } else if (omxStore->registerAsService() != OK) {
+            LOG(ERROR) << "Cannot register IOmxStore HAL service.";
+        }
         sp<IOmx> omx = new implementation::Omx();
         if (omx == nullptr) {
-            LOG(ERROR) << "Cannot create a Treble IOmx service.";
+            LOG(ERROR) << "Cannot create IOmx HAL service.";
         } else if (omx->registerAsService() != OK) {
-            LOG(ERROR) << "Cannot register a Treble IOmx service.";
+            LOG(ERROR) << "Cannot register IOmx HAL service.";
         } else {
-            LOG(INFO) << "Treble IOmx service created.";
+            LOG(INFO) << "Treble OMX service created.";
         }
     } else {
         MediaCodecService::instantiate();
-        LOG(INFO) << "Non-Treble IOMX service created.";
+        LOG(INFO) << "Non-Treble OMX service created.";
     }
 
     ProcessState::self()->startThreadPool();
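The Treble branch above repeats a small create/register/log pattern once per HAL interface. A minimal sketch of that pattern as a reusable helper follows; the helper name, template parameters, and includes are assumptions made for illustration and are not part of this change.

    // Hypothetical helper sketching the create/register/log pattern used above.
    // THal is assumed to be a HIDL interface type exposing registerAsService();
    // TImpl is assumed to be its local implementation.
    #include <android-base/logging.h>
    #include <utils/Errors.h>
    #include <utils/StrongPointer.h>

    template <typename THal, typename TImpl>
    void createAndRegisterHalService(const char* name) {
        ::android::sp<THal> hal = new TImpl();
        if (hal == nullptr) {
            LOG(ERROR) << "Cannot create " << name << " HAL service.";
        } else if (hal->registerAsService() != ::android::OK) {
            // Registration fails if hwservicemanager is unreachable or the
            // service is not permitted for this process.
            LOG(ERROR) << "Cannot register " << name << " HAL service.";
        } else {
            LOG(INFO) << name << " HAL service created.";
        }
    }

    // Example usage (interface and implementation types assumed to exist):
    //   createAndRegisterHalService<IOmxStore, implementation::OmxStore>("IOmxStore");
    //   createAndRegisterHalService<IOmx, implementation::Omx>("IOmx");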