Merge "Add explicit cutils to stagefright"
diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h
index e91904c..be08c19 100644
--- a/include/media/stagefright/MetaData.h
+++ b/include/media/stagefright/MetaData.h
@@ -35,6 +35,8 @@
     kKeyHeight            = 'heig',  // int32_t, image pixel
     kKeyDisplayWidth      = 'dWid',  // int32_t, display/presentation
     kKeyDisplayHeight     = 'dHgt',  // int32_t, display/presentation
+    kKeySARWidth          = 'sarW',  // int32_t, sampleAspectRatio width
+    kKeySARHeight         = 'sarH',  // int32_t, sampleAspectRatio height
 
     // a rectangle, if absent assumed to be (0, 0, width - 1, height - 1)
     kKeyCropRect          = 'crop',
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index f2b6441..a35d562 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -13,6 +13,7 @@
 
 LOCAL_SRC_FILES:= \
     AudioTrack.cpp \
+    AudioTrackShared.cpp \
     IAudioFlinger.cpp \
     IAudioFlingerClient.cpp \
     IAudioTrack.cpp \
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index ac672a7..1d87ff8 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -1482,180 +1482,4 @@
     }
 }
 
-// =========================================================================
-
-
-audio_track_cblk_t::audio_track_cblk_t()
-    : lock(Mutex::SHARED), cv(Condition::SHARED), user(0), server(0),
-    userBase(0), serverBase(0), frameCount_(0),
-    loopStart(UINT_MAX), loopEnd(UINT_MAX), loopCount(0), mVolumeLR(0x10001000),
-    mSendLevel(0), flags(0)
-{
-}
-
-uint32_t audio_track_cblk_t::stepUser(size_t stepCount, size_t frameCount, bool isOut)
-{
-    ALOGV("stepuser %08x %08x %d", user, server, stepCount);
-
-    uint32_t u = user;
-    u += stepCount;
-    // Ensure that user is never ahead of server for AudioRecord
-    if (isOut) {
-        // If stepServer() has been called once, switch to normal obtainBuffer() timeout period
-        if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS-1) {
-            bufferTimeoutMs = MAX_RUN_TIMEOUT_MS;
-        }
-    } else if (u > server) {
-        ALOGW("stepUser occurred after track reset");
-        u = server;
-    }
-
-    if (u >= frameCount) {
-        // common case, user didn't just wrap
-        if (u - frameCount >= userBase ) {
-            userBase += frameCount;
-        }
-    } else if (u >= userBase + frameCount) {
-        // user just wrapped
-        userBase += frameCount;
-    }
-
-    user = u;
-
-    // Clear flow control error condition as new data has been written/read to/from buffer.
-    if (flags & CBLK_UNDERRUN) {
-        android_atomic_and(~CBLK_UNDERRUN, &flags);
-    }
-
-    return u;
-}
-
-bool audio_track_cblk_t::stepServer(size_t stepCount, size_t frameCount, bool isOut)
-{
-    ALOGV("stepserver %08x %08x %d", user, server, stepCount);
-
-    if (!tryLock()) {
-        ALOGW("stepServer() could not lock cblk");
-        return false;
-    }
-
-    uint32_t s = server;
-    bool flushed = (s == user);
-
-    s += stepCount;
-    if (isOut) {
-        // Mark that we have read the first buffer so that next time stepUser() is called
-        // we switch to normal obtainBuffer() timeout period
-        if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS) {
-            bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS - 1;
-        }
-        // It is possible that we receive a flush()
-        // while the mixer is processing a block: in this case,
-        // stepServer() is called After the flush() has reset u & s and
-        // we have s > u
-        if (flushed) {
-            ALOGW("stepServer occurred after track reset");
-            s = user;
-        }
-    }
-
-    if (s >= loopEnd) {
-        ALOGW_IF(s > loopEnd, "stepServer: s %u > loopEnd %u", s, loopEnd);
-        s = loopStart;
-        if (--loopCount == 0) {
-            loopEnd = UINT_MAX;
-            loopStart = UINT_MAX;
-        }
-    }
-
-    if (s >= frameCount) {
-        // common case, server didn't just wrap
-        if (s - frameCount >= serverBase ) {
-            serverBase += frameCount;
-        }
-    } else if (s >= serverBase + frameCount) {
-        // server just wrapped
-        serverBase += frameCount;
-    }
-
-    server = s;
-
-    if (!(flags & CBLK_INVALID)) {
-        cv.signal();
-    }
-    lock.unlock();
-    return true;
-}
-
-void* audio_track_cblk_t::buffer(void *buffers, size_t frameSize, uint32_t offset) const
-{
-    return (int8_t *)buffers + (offset - userBase) * frameSize;
-}
-
-uint32_t audio_track_cblk_t::framesAvailable(size_t frameCount, bool isOut)
-{
-    Mutex::Autolock _l(lock);
-    return framesAvailable_l(frameCount, isOut);
-}
-
-uint32_t audio_track_cblk_t::framesAvailable_l(size_t frameCount, bool isOut)
-{
-    uint32_t u = user;
-    uint32_t s = server;
-
-    if (isOut) {
-        uint32_t limit = (s < loopStart) ? s : loopStart;
-        return limit + frameCount - u;
-    } else {
-        return frameCount + u - s;
-    }
-}
-
-uint32_t audio_track_cblk_t::framesReady(bool isOut)
-{
-    uint32_t u = user;
-    uint32_t s = server;
-
-    if (isOut) {
-        if (u < loopEnd) {
-            return u - s;
-        } else {
-            // do not block on mutex shared with client on AudioFlinger side
-            if (!tryLock()) {
-                ALOGW("framesReady() could not lock cblk");
-                return 0;
-            }
-            uint32_t frames = UINT_MAX;
-            if (loopCount >= 0) {
-                frames = (loopEnd - loopStart)*loopCount + u - s;
-            }
-            lock.unlock();
-            return frames;
-        }
-    } else {
-        return s - u;
-    }
-}
-
-bool audio_track_cblk_t::tryLock()
-{
-    // the code below simulates lock-with-timeout
-    // we MUST do this to protect the AudioFlinger server
-    // as this lock is shared with the client.
-    status_t err;
-
-    err = lock.tryLock();
-    if (err == -EBUSY) { // just wait a bit
-        usleep(1000);
-        err = lock.tryLock();
-    }
-    if (err != NO_ERROR) {
-        // probably, the client just died.
-        return false;
-    }
-    return true;
-}
-
-// -------------------------------------------------------------------------
-
 }; // namespace android
diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp
new file mode 100644
index 0000000..bee13c8
--- /dev/null
+++ b/media/libmedia/AudioTrackShared.cpp
@@ -0,0 +1,196 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioTrackShared"
+//#define LOG_NDEBUG 0
+
+#include <private/media/AudioTrackShared.h>
+#include <utils/Log.h>
+
+namespace android {
+
+audio_track_cblk_t::audio_track_cblk_t()
+    : lock(Mutex::SHARED), cv(Condition::SHARED), user(0), server(0),
+    userBase(0), serverBase(0), frameCount_(0),
+    loopStart(UINT_MAX), loopEnd(UINT_MAX), loopCount(0), mVolumeLR(0x10001000),
+    mSendLevel(0), flags(0)
+{
+}
+
+uint32_t audio_track_cblk_t::stepUser(size_t stepCount, size_t frameCount, bool isOut)
+{
+    ALOGV("stepuser %08x %08x %d", user, server, stepCount);
+
+    uint32_t u = user;
+    u += stepCount;
+    // Ensure that user is never ahead of server for AudioRecord
+    if (isOut) {
+        // If stepServer() has been called once, switch to normal obtainBuffer() timeout period
+        if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS-1) {
+            bufferTimeoutMs = MAX_RUN_TIMEOUT_MS;
+        }
+    } else if (u > server) {
+        ALOGW("stepUser occurred after track reset");
+        u = server;
+    }
+
+    if (u >= frameCount) {
+        // common case, user didn't just wrap
+        if (u - frameCount >= userBase ) {
+            userBase += frameCount;
+        }
+    } else if (u >= userBase + frameCount) {
+        // user just wrapped
+        userBase += frameCount;
+    }
+
+    user = u;
+
+    // Clear flow control error condition as new data has been written/read to/from buffer.
+    if (flags & CBLK_UNDERRUN) {
+        android_atomic_and(~CBLK_UNDERRUN, &flags);
+    }
+
+    return u;
+}
+
+bool audio_track_cblk_t::stepServer(size_t stepCount, size_t frameCount, bool isOut)
+{
+    ALOGV("stepserver %08x %08x %d", user, server, stepCount);
+
+    if (!tryLock()) {
+        ALOGW("stepServer() could not lock cblk");
+        return false;
+    }
+
+    uint32_t s = server;
+    bool flushed = (s == user);
+
+    s += stepCount;
+    if (isOut) {
+        // Mark that we have read the first buffer so that next time stepUser() is called
+        // we switch to normal obtainBuffer() timeout period
+        if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS) {
+            bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS - 1;
+        }
+        // It is possible that we receive a flush()
+        // while the mixer is processing a block: in this case,
+        // stepServer() is called after the flush() has reset u & s and
+        // we have s > u
+        if (flushed) {
+            ALOGW("stepServer occurred after track reset");
+            s = user;
+        }
+    }
+
+    if (s >= loopEnd) {
+        ALOGW_IF(s > loopEnd, "stepServer: s %u > loopEnd %u", s, loopEnd);
+        s = loopStart;
+        if (--loopCount == 0) {
+            loopEnd = UINT_MAX;
+            loopStart = UINT_MAX;
+        }
+    }
+
+    if (s >= frameCount) {
+        // common case, server didn't just wrap
+        if (s - frameCount >= serverBase ) {
+            serverBase += frameCount;
+        }
+    } else if (s >= serverBase + frameCount) {
+        // server just wrapped
+        serverBase += frameCount;
+    }
+
+    server = s;
+
+    if (!(flags & CBLK_INVALID)) {
+        cv.signal();
+    }
+    lock.unlock();
+    return true;
+}
+
+void* audio_track_cblk_t::buffer(void *buffers, size_t frameSize, uint32_t offset) const
+{
+    return (int8_t *)buffers + (offset - userBase) * frameSize;
+}
+
+uint32_t audio_track_cblk_t::framesAvailable(size_t frameCount, bool isOut)
+{
+    Mutex::Autolock _l(lock);
+    return framesAvailable_l(frameCount, isOut);
+}
+
+uint32_t audio_track_cblk_t::framesAvailable_l(size_t frameCount, bool isOut)
+{
+    uint32_t u = user;
+    uint32_t s = server;
+
+    if (isOut) {
+        uint32_t limit = (s < loopStart) ? s : loopStart;
+        return limit + frameCount - u;
+    } else {
+        return frameCount + u - s;
+    }
+}
+
+uint32_t audio_track_cblk_t::framesReady(bool isOut)
+{
+    uint32_t u = user;
+    uint32_t s = server;
+
+    if (isOut) {
+        if (u < loopEnd) {
+            return u - s;
+        } else {
+            // do not block on mutex shared with client on AudioFlinger side
+            if (!tryLock()) {
+                ALOGW("framesReady() could not lock cblk");
+                return 0;
+            }
+            uint32_t frames = UINT_MAX;
+            if (loopCount >= 0) {
+                frames = (loopEnd - loopStart)*loopCount + u - s;
+            }
+            lock.unlock();
+            return frames;
+        }
+    } else {
+        return s - u;
+    }
+}
+
+bool audio_track_cblk_t::tryLock()
+{
+    // the code below simulates lock-with-timeout
+    // we MUST do this to protect the AudioFlinger server
+    // as this lock is shared with the client.
+    status_t err;
+
+    err = lock.tryLock();
+    if (err == -EBUSY) { // just wait a bit
+        usleep(1000);
+        err = lock.tryLock();
+    }
+    if (err != NO_ERROR) {
+        // probably, the client just died.
+        return false;
+    }
+    return true;
+}
+
+}   // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index f363568..0f30372 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -74,6 +74,21 @@
     DISALLOW_EVIL_CONSTRUCTORS(SeekAction);
 };
 
+struct NuPlayer::SetSurfaceAction : public Action {
+    SetSurfaceAction(const sp<NativeWindowWrapper> &wrapper)
+        : mWrapper(wrapper) {
+    }
+
+    virtual void execute(NuPlayer *player) {
+        player->performSetSurface(mWrapper);
+    }
+
+private:
+    sp<NativeWindowWrapper> mWrapper;
+
+    DISALLOW_EVIL_CONSTRUCTORS(SetSurfaceAction);
+};
+
 // Use this if there's no state necessary to save in order to execute
 // the action.
 struct NuPlayer::SimpleAction : public Action {
@@ -111,7 +126,8 @@
       mVideoLateByUs(0ll),
       mNumFramesTotal(0ll),
       mNumFramesDropped(0ll),
-      mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW) {
+      mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW),
+      mStarted(false) {
 }
 
 NuPlayer::~NuPlayer() {
@@ -181,11 +197,19 @@
     msg->post();
 }
 
-void NuPlayer::setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
+void NuPlayer::setVideoSurfaceTextureAsync(
+        const sp<ISurfaceTexture> &surfaceTexture) {
     sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id());
-    sp<SurfaceTextureClient> surfaceTextureClient(surfaceTexture != NULL ?
-                new SurfaceTextureClient(surfaceTexture) : NULL);
-    msg->setObject("native-window", new NativeWindowWrapper(surfaceTextureClient));
+
+    if (surfaceTexture == NULL) {
+        msg->setObject("native-window", NULL);
+    } else {
+        msg->setObject(
+                "native-window",
+                new NativeWindowWrapper(
+                    new SurfaceTextureClient(surfaceTexture)));
+    }
+
     msg->post();
 }
 
@@ -278,13 +302,24 @@
         {
             ALOGV("kWhatSetVideoNativeWindow");
 
+            mDeferredActions.push_back(
+                    new SimpleAction(&NuPlayer::performDecoderShutdown));
+
             sp<RefBase> obj;
             CHECK(msg->findObject("native-window", &obj));
 
-            mNativeWindow = static_cast<NativeWindowWrapper *>(obj.get());
+            mDeferredActions.push_back(
+                    new SetSurfaceAction(
+                        static_cast<NativeWindowWrapper *>(obj.get())));
 
-            // XXX - ignore error from setVideoScalingMode for now
-            setVideoScalingMode(mVideoScalingMode);
+            if (obj != NULL) {
+                // If there is a new surface texture, instantiate decoders
+                // again if possible.
+                mDeferredActions.push_back(
+                        new SimpleAction(&NuPlayer::performScanSources));
+            }
+
+            processDeferredActions();
             break;
         }
 
@@ -311,6 +346,7 @@
             mVideoLateByUs = 0;
             mNumFramesTotal = 0;
             mNumFramesDropped = 0;
+            mStarted = true;
 
             mSource->start();
 
@@ -448,7 +484,8 @@
             } else if (what == ACodec::kWhatOutputFormatChanged) {
                 if (audio) {
                     int32_t numChannels;
-                    CHECK(codecRequest->findInt32("channel-count", &numChannels));
+                    CHECK(codecRequest->findInt32(
+                                "channel-count", &numChannels));
 
                     int32_t sampleRate;
                     CHECK(codecRequest->findInt32("sample-rate", &sampleRate));
@@ -460,13 +497,15 @@
 
                     audio_output_flags_t flags;
                     int64_t durationUs;
-                    // FIXME: we should handle the case where the video decoder is created after
-                    // we receive the format change indication. Current code will just make that
-                    // we select deep buffer with video which should not be a problem as it should
+                    // FIXME: we should handle the case where the video decoder
+                    // is created after we receive the format change indication.
+                    // Current code will just ensure that we select deep buffer
+                    // with video which should not be a problem as it should
                     // not prevent from keeping A/V sync.
                     if (mVideoDecoder == NULL &&
                             mSource->getDuration(&durationUs) == OK &&
-                            durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
+                            durationUs
+                                > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
                         flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
                     } else {
                         flags = AUDIO_OUTPUT_FLAG_NONE;
@@ -502,17 +541,35 @@
                                 "crop",
                                 &cropLeft, &cropTop, &cropRight, &cropBottom));
 
+                    int32_t displayWidth = cropRight - cropLeft + 1;
+                    int32_t displayHeight = cropBottom - cropTop + 1;
+
                     ALOGV("Video output format changed to %d x %d "
                          "(crop: %d x %d @ (%d, %d))",
                          width, height,
-                         (cropRight - cropLeft + 1),
-                         (cropBottom - cropTop + 1),
+                         displayWidth,
+                         displayHeight,
                          cropLeft, cropTop);
 
+                    sp<AMessage> videoInputFormat =
+                        mSource->getFormat(false /* audio */);
+
+                    // Take into account sample aspect ratio if necessary:
+                    int32_t sarWidth, sarHeight;
+                    if (videoInputFormat->findInt32("sar-width", &sarWidth)
+                            && videoInputFormat->findInt32(
+                                "sar-height", &sarHeight)) {
+                        ALOGV("Sample aspect ratio %d : %d",
+                              sarWidth, sarHeight);
+
+                        displayWidth = (displayWidth * sarWidth) / sarHeight;
+
+                        ALOGV("display dimensions %d x %d",
+                              displayWidth, displayHeight);
+                    }
+
                     notifyListener(
-                            MEDIA_SET_VIDEO_SIZE,
-                            cropRight - cropLeft + 1,
-                            cropBottom - cropTop + 1);
+                            MEDIA_SET_VIDEO_SIZE, displayWidth, displayHeight);
                 }
             } else if (what == ACodec::kWhatShutdownCompleted) {
                 ALOGV("%s shutdown completed", audio ? "audio" : "video");
@@ -986,8 +1043,7 @@
 
 status_t NuPlayer::setVideoScalingMode(int32_t mode) {
     mVideoScalingMode = mode;
-    if (mNativeWindow != NULL
-            && mNativeWindow->getNativeWindow() != NULL) {
+    if (mNativeWindow != NULL) {
         status_t ret = native_window_set_scaling_mode(
                 mNativeWindow->getNativeWindow().get(), mVideoScalingMode);
         if (ret != OK) {
@@ -1122,14 +1178,36 @@
             driver->notifyResetComplete();
         }
     }
+
+    mStarted = false;
 }
 
 void NuPlayer::performScanSources() {
     ALOGV("performScanSources");
 
+    if (!mStarted) {
+        return;
+    }
+
     if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
         postScanSources();
     }
 }
 
+void NuPlayer::performSetSurface(const sp<NativeWindowWrapper> &wrapper) {
+    ALOGV("performSetSurface");
+
+    mNativeWindow = wrapper;
+
+    // XXX - ignore error from setVideoScalingMode for now
+    setVideoScalingMode(mVideoScalingMode);
+
+    if (mDriver != NULL) {
+        sp<NuPlayerDriver> driver = mDriver.promote();
+        if (driver != NULL) {
+            driver->notifySetSurfaceComplete();
+        }
+    }
+}
+
 }  // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 6e174e0..ca87be9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -42,7 +42,9 @@
 
     void setDataSource(int fd, int64_t offset, int64_t length);
 
-    void setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);
+    void setVideoSurfaceTextureAsync(
+            const sp<ISurfaceTexture> &surfaceTexture);
+
     void setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink);
     void start();
 
@@ -75,6 +77,7 @@
     struct StreamingSource;
     struct Action;
     struct SeekAction;
+    struct SetSurfaceAction;
     struct SimpleAction;
 
     enum {
@@ -140,6 +143,8 @@
 
     int32_t mVideoScalingMode;
 
+    bool mStarted;
+
     status_t instantiateDecoder(bool audio, sp<Decoder> *decoder);
 
     status_t feedDecoderInputData(bool audio, const sp<AMessage> &msg);
@@ -165,6 +170,7 @@
     void performDecoderShutdown();
     void performReset();
     void performScanSources();
+    void performSetSurface(const sp<NativeWindowWrapper> &wrapper);
 
     DISALLOW_EVIL_CONSTRUCTORS(NuPlayer);
 };
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index d03601f..a485dda 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -29,6 +29,7 @@
 
 NuPlayerDriver::NuPlayerDriver()
     : mResetInProgress(false),
+      mSetSurfaceInProgress(false),
       mDurationUs(-1),
       mPositionUs(-1),
       mNumFramesTotal(0),
@@ -97,7 +98,19 @@
 
 status_t NuPlayerDriver::setVideoSurfaceTexture(
         const sp<ISurfaceTexture> &surfaceTexture) {
-    mPlayer->setVideoSurfaceTexture(surfaceTexture);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mResetInProgress) {
+        return INVALID_OPERATION;
+    }
+
+    mSetSurfaceInProgress = true;
+
+    mPlayer->setVideoSurfaceTextureAsync(surfaceTexture);
+
+    while (mSetSurfaceInProgress) {
+        mCondition.wait(mLock);
+    }
 
     return OK;
 }
@@ -308,6 +321,13 @@
     mCondition.broadcast();
 }
 
+void NuPlayerDriver::notifySetSurfaceComplete() {
+    Mutex::Autolock autoLock(mLock);
+    CHECK(mSetSurfaceInProgress);
+    mSetSurfaceInProgress = false;
+    mCondition.broadcast();
+}
+
 void NuPlayerDriver::notifyDuration(int64_t durationUs) {
     Mutex::Autolock autoLock(mLock);
     mDurationUs = durationUs;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index 4a0026c..d551bf1 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -62,6 +62,7 @@
     virtual status_t dump(int fd, const Vector<String16> &args) const;
 
     void notifyResetComplete();
+    void notifySetSurfaceComplete();
     void notifyDuration(int64_t durationUs);
     void notifyPosition(int64_t positionUs);
     void notifySeekComplete();
@@ -78,6 +79,7 @@
     // The following are protected through "mLock"
     // >>>
     bool mResetInProgress;
+    bool mSetSurfaceInProgress;
     int64_t mDurationUs;
     int64_t mPositionUs;
     int64_t mNumFramesTotal;
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 2a7b2ae..2b20ab0 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -966,17 +966,23 @@
             err = INVALID_OPERATION;
         } else {
             if (encoder) {
-                if (!msg->findInt32("flac-compression-level", &compressionLevel)) {
+                if (!msg->findInt32(
+                            "flac-compression-level", &compressionLevel)) {
                     compressionLevel = 5;// default FLAC compression level
                 } else if (compressionLevel < 0) {
-                    ALOGW("compression level %d outside [0..8] range, using 0", compressionLevel);
+                    ALOGW("compression level %d outside [0..8] range, "
+                          "using 0",
+                          compressionLevel);
                     compressionLevel = 0;
                 } else if (compressionLevel > 8) {
-                    ALOGW("compression level %d outside [0..8] range, using 8", compressionLevel);
+                    ALOGW("compression level %d outside [0..8] range, "
+                          "using 8",
+                          compressionLevel);
                     compressionLevel = 8;
                 }
             }
-            err = setupFlacCodec(encoder, numChannels, sampleRate, compressionLevel);
+            err = setupFlacCodec(
+                    encoder, numChannels, sampleRate, compressionLevel);
         }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
         int32_t numChannels, sampleRate;
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 74e9222..1a6ff4b 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -85,6 +85,13 @@
 
         msg->setInt32("width", width);
         msg->setInt32("height", height);
+
+        int32_t sarWidth, sarHeight;
+        if (meta->findInt32(kKeySARWidth, &sarWidth)
+                && meta->findInt32(kKeySARHeight, &sarHeight)) {
+            msg->setInt32("sar-width", sarWidth);
+            msg->setInt32("sar-height", sarHeight);
+        }
     } else if (!strncasecmp("audio/", mime, 6)) {
         int32_t numChannels, sampleRate;
         CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
@@ -372,6 +379,13 @@
         } else {
             ALOGW("did not find width and/or height");
         }
+
+        int32_t sarWidth, sarHeight;
+        if (msg->findInt32("sar-width", &sarWidth)
+                && msg->findInt32("sar-height", &sarHeight)) {
+            meta->setInt32(kKeySARWidth, sarWidth);
+            meta->setInt32(kKeySARHeight, sarHeight);
+        }
     } else if (mime.startsWith("audio/")) {
         int32_t numChannels;
         if (msg->findInt32("channel-count", &numChannels)) {
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index a141752..b822868 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -22,6 +22,7 @@
 
 #include <media/stagefright/foundation/ABitReader.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/hexdump.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
@@ -41,7 +42,9 @@
 
 // Determine video dimensions from the sequence parameterset.
 void FindAVCDimensions(
-        const sp<ABuffer> &seqParamSet, int32_t *width, int32_t *height) {
+        const sp<ABuffer> &seqParamSet,
+        int32_t *width, int32_t *height,
+        int32_t *sarWidth, int32_t *sarHeight) {
     ABitReader br(seqParamSet->data() + 1, seqParamSet->size() - 1);
 
     unsigned profile_idc = br.getBits(8);
@@ -129,6 +132,48 @@
         *height -=
             (frame_crop_top_offset + frame_crop_bottom_offset) * cropUnitY;
     }
+
+    if (sarWidth != NULL) {
+        *sarWidth = 0;
+    }
+
+    if (sarHeight != NULL) {
+        *sarHeight = 0;
+    }
+
+    if (br.getBits(1)) {  // vui_parameters_present_flag
+        unsigned sar_width = 0, sar_height = 0;
+
+        if (br.getBits(1)) {  // aspect_ratio_info_present_flag
+            unsigned aspect_ratio_idc = br.getBits(8);
+
+            if (aspect_ratio_idc == 255 /* extendedSAR */) {
+                sar_width = br.getBits(16);
+                sar_height = br.getBits(16);
+            } else if (aspect_ratio_idc > 0 && aspect_ratio_idc < 14) {
+                static const int32_t kFixedSARWidth[] = {
+                    1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160
+                };
+
+                static const int32_t kFixedSARHeight[] = {
+                    1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99
+                };
+
+                sar_width = kFixedSARWidth[aspect_ratio_idc - 1];
+                sar_height = kFixedSARHeight[aspect_ratio_idc - 1];
+            }
+        }
+
+        ALOGV("sample aspect ratio = %u : %u", sar_width, sar_height);
+
+        if (sarWidth != NULL) {
+            *sarWidth = sar_width;
+        }
+
+        if (sarHeight != NULL) {
+            *sarHeight = sar_height;
+        }
+    }
 }
 
 status_t getNextNALUnit(
@@ -254,7 +299,9 @@
     }
 
     int32_t width, height;
-    FindAVCDimensions(seqParamSet, &width, &height);
+    int32_t sarWidth, sarHeight;
+    FindAVCDimensions(
+            seqParamSet, &width, &height, &sarWidth, &sarHeight);
 
     size_t stopOffset;
     sp<ABuffer> picParamSet = FindNAL(data, size, 8, &stopOffset);
@@ -301,8 +348,29 @@
     meta->setInt32(kKeyWidth, width);
     meta->setInt32(kKeyHeight, height);
 
-    ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)",
-         width, height, AVCProfileToString(profile), level / 10, level % 10);
+    if (sarWidth > 1 || sarHeight > 1) {
+        // We treat 0:0 (unspecified) as 1:1.
+
+        meta->setInt32(kKeySARWidth, sarWidth);
+        meta->setInt32(kKeySARHeight, sarHeight);
+
+        ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d) "
+              "SAR %d : %d",
+             width,
+             height,
+             AVCProfileToString(profile),
+             level / 10,
+             level % 10,
+             sarWidth,
+             sarHeight);
+    } else {
+        ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)",
+             width,
+             height,
+             AVCProfileToString(profile),
+             level / 10,
+             level % 10);
+    }
 
     return meta;
 }
diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h
index e418822..d517320 100644
--- a/media/libstagefright/include/avc_utils.h
+++ b/media/libstagefright/include/avc_utils.h
@@ -36,8 +36,11 @@
     kAVCProfileCAVLC444Intra = 0x2c
 };
 
+// Optionally returns sample aspect ratio as well.
 void FindAVCDimensions(
-        const sp<ABuffer> &seqParamSet, int32_t *width, int32_t *height);
+        const sp<ABuffer> &seqParamSet,
+        int32_t *width, int32_t *height,
+        int32_t *sarWidth = NULL, int32_t *sarHeight = NULL);
 
 unsigned parseUE(ABitReader *br);
 
diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk
index dc65833..6d42143 100644
--- a/services/audioflinger/Android.mk
+++ b/services/audioflinger/Android.mk
@@ -78,6 +78,13 @@
 # LOCAL_SRC_FILES += AudioWatchdog.cpp
 # LOCAL_CFLAGS += -DAUDIO_WATCHDOG
 
+# Define ANDROID_SMP appropriately. Used to get inline tracing fast-path.
+ifeq ($(TARGET_CPU_SMP),true)
+    LOCAL_CFLAGS += -DANDROID_SMP=1
+else
+    LOCAL_CFLAGS += -DANDROID_SMP=0
+endif
+
 include $(BUILD_SHARED_LIBRARY)
 
 #
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index 0366dfe..5e6af16 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -17,10 +17,7 @@
 #define LOG_TAG "FastMixer"
 //#define LOG_NDEBUG 0
 
-/** Uncomment for systrace.
- * ATRACE_TAG will default to ATRACE_TAG_NEVER in the header.
- */
-//#define ATRACE_TAG ATRACE_TAG_AUDIO
+#define ATRACE_TAG ATRACE_TAG_AUDIO
 
 #include <sys/atomics.h>
 #include <time.h>
@@ -376,14 +373,14 @@
                 // up to 1 ms.  If enough active tracks all blocked in sequence, this would result
                 // in the overall fast mix cycle being delayed.  Should use a non-blocking FIFO.
                 size_t framesReady = fastTrack->mBufferProvider->framesReady();
-#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER)
-                // I wish we had formatted trace names
-                char traceName[16];
-                strcpy(traceName, "framesReady");
-                traceName[11] = i + (i < 10 ? '0' : 'A' - 10);
-                traceName[12] = '\0';
-                ATRACE_INT(traceName, framesReady);
-#endif
+                if (ATRACE_ENABLED()) {
+                    // I wish we had formatted trace names
+                    char traceName[16];
+                    strcpy(traceName, "framesReady");
+                    traceName[11] = i + (i < 10 ? '0' : 'A' - 10);
+                    traceName[12] = '\0';
+                    ATRACE_INT(traceName, framesReady);
+                }
                 FastTrackDump *ftDump = &dumpState->mTracks[i];
                 FastTrackUnderruns underruns = ftDump->mUnderruns;
                 if (framesReady < frameCount) {
@@ -429,13 +426,9 @@
             // FIXME write() is non-blocking and lock-free for a properly implemented NBAIO sink,
             //       but this code should be modified to handle both non-blocking and blocking sinks
             dumpState->mWriteSequence++;
-#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER)
             ATRACE_BEGIN("write");
-#endif
             ssize_t framesWritten = outputSink->write(mixBuffer, frameCount);
-#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER)
             ATRACE_END();
-#endif
             dumpState->mWriteSequence++;
             if (framesWritten >= 0) {
                 ALOG_ASSERT(framesWritten <= frameCount);
@@ -490,9 +483,7 @@
                 sleepNs = -1;
               if (isWarm) {
                 if (sec > 0 || nsec > underrunNs) {
-#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER)
-                    ScopedTrace st(ATRACE_TAG, "underrun");
-#endif
+                    ATRACE_NAME("underrun");
                     // FIXME only log occasionally
                     ALOGV("underrun: time since last cycle %d.%03ld sec",
                             (int) sec, nsec / 1000000L);
@@ -572,10 +563,8 @@
                 // this store #4 is not atomic with respect to stores #1, #2, #3 above, but
                 // the newest open and oldest closed halves are atomic with respect to each other
                 dumpState->mBounds = bounds;
-#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER)
                 ATRACE_INT("cycle_ms", monotonicNs / 1000000);
                 ATRACE_INT("load_us", loadNs / 1000);
-#endif
               }
 #endif
             } else {
diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp
index 5a7bb48..e804f77 100644
--- a/services/camera/libcameraservice/Camera2Client.cpp
+++ b/services/camera/libcameraservice/Camera2Client.cpp
@@ -183,37 +183,37 @@
 
     result.append("    White balance mode: ");
     switch (p.wbMode) {
-        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_AUTO)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_INCANDESCENT)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_FLUORESCENT)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_WARM_FLUORESCENT)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_DAYLIGHT)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_CLOUDY_DAYLIGHT)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_TWILIGHT)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_SHADE)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE)
         default: result.append("UNKNOWN\n");
     }
 
     result.append("    Effect mode: ");
     switch (p.effectMode) {
-        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_OFF)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MONO)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_NEGATIVE)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_SOLARIZE)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_SEPIA)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_POSTERIZE)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_WHITEBOARD)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_BLACKBOARD)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_AQUA)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA)
         default: result.append("UNKNOWN\n");
     }
 
     result.append("    Antibanding mode: ");
     switch (p.antibandingMode) {
-        CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_AUTO)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_OFF)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_50HZ)
-        CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_60HZ)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ)
+        CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ)
         default: result.append("UNKNOWN\n");
     }
 
@@ -1260,7 +1260,7 @@
     }
     // Ignoring type
     if (l.mParameters.fastInfo.bestFaceDetectMode ==
-            ANDROID_STATS_FACE_DETECTION_OFF) {
+            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
         ALOGE("%s: Camera %d: Face detection not supported",
                 __FUNCTION__, mCameraId);
         return INVALID_OPERATION;
diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp
index e032522..8ee5de7 100644
--- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp
@@ -177,7 +177,7 @@
         SharedParameters::Lock l(client->getParameters());
         enableFaceDetect = l.mParameters.enableFaceDetect;
     }
-    entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE);
+    entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);
 
     // TODO: This should be an error once implementations are compliant
     if (entry.count == 0) {
@@ -190,9 +190,9 @@
     Vector<camera_face_t> faces;
     metadata.number_of_faces = 0;
 
-    if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) {
+    if (enableFaceDetect && faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
         SharedParameters::Lock l(client->getParameters());
-        entry = frame.find(ANDROID_STATS_FACE_RECTANGLES);
+        entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
         if (entry.count == 0) {
             // No faces this frame
             /* warning: locks SharedCameraClient */
@@ -209,7 +209,7 @@
         }
         const int32_t *faceRects = entry.data.i32;
 
-        entry = frame.find(ANDROID_STATS_FACE_SCORES);
+        entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
         if (entry.count == 0) {
             ALOGE("%s: Camera %d: Unable to read face scores",
                     __FUNCTION__, client->getCameraId());
@@ -220,8 +220,8 @@
         const int32_t *faceLandmarks = NULL;
         const int32_t *faceIds = NULL;
 
-        if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
-            entry = frame.find(ANDROID_STATS_FACE_LANDMARKS);
+        if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
+            entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
             if (entry.count == 0) {
                 ALOGE("%s: Camera %d: Unable to read face landmarks",
                         __FUNCTION__, client->getCameraId());
@@ -229,7 +229,7 @@
             }
             faceLandmarks = entry.data.i32;
 
-            entry = frame.find(ANDROID_STATS_FACE_IDS);
+            entry = frame.find(ANDROID_STATISTICS_FACE_IDS);
 
             if (entry.count == 0) {
                 ALOGE("%s: Camera %d: Unable to read face IDs",
@@ -256,7 +256,7 @@
             face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);
 
             face.score = faceScores[i];
-            if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
+            if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                 face.id = faceIds[i];
                 face.left_eye[0] =
                         l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp
index 93927e6..6ab19b1 100644
--- a/services/camera/libcameraservice/camera2/Parameters.cpp
+++ b/services/camera/libcameraservice/camera2/Parameters.cpp
@@ -278,7 +278,7 @@
     gpsProcessingMethod = "unknown";
     // GPS fields in CameraParameters are not set by implementation
 
-    wbMode = ANDROID_CONTROL_AWB_AUTO;
+    wbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
     params.set(CameraParameters::KEY_WHITE_BALANCE,
             CameraParameters::WHITE_BALANCE_AUTO);
 
@@ -291,40 +291,40 @@
             if (addComma) supportedWhiteBalance += ",";
             addComma = true;
             switch (availableWhiteBalanceModes.data.u8[i]) {
-            case ANDROID_CONTROL_AWB_AUTO:
+            case ANDROID_CONTROL_AWB_MODE_AUTO:
                 supportedWhiteBalance +=
                     CameraParameters::WHITE_BALANCE_AUTO;
                 break;
-            case ANDROID_CONTROL_AWB_INCANDESCENT:
+            case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
                 supportedWhiteBalance +=
                     CameraParameters::WHITE_BALANCE_INCANDESCENT;
                 break;
-            case ANDROID_CONTROL_AWB_FLUORESCENT:
+            case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
                 supportedWhiteBalance +=
                     CameraParameters::WHITE_BALANCE_FLUORESCENT;
                 break;
-            case ANDROID_CONTROL_AWB_WARM_FLUORESCENT:
+            case ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT:
                 supportedWhiteBalance +=
                     CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT;
                 break;
-            case ANDROID_CONTROL_AWB_DAYLIGHT:
+            case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
                 supportedWhiteBalance +=
                     CameraParameters::WHITE_BALANCE_DAYLIGHT;
                 break;
-            case ANDROID_CONTROL_AWB_CLOUDY_DAYLIGHT:
+            case ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT:
                 supportedWhiteBalance +=
                     CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT;
                 break;
-            case ANDROID_CONTROL_AWB_TWILIGHT:
+            case ANDROID_CONTROL_AWB_MODE_TWILIGHT:
                 supportedWhiteBalance +=
                     CameraParameters::WHITE_BALANCE_TWILIGHT;
                 break;
-            case ANDROID_CONTROL_AWB_SHADE:
+            case ANDROID_CONTROL_AWB_MODE_SHADE:
                 supportedWhiteBalance +=
                     CameraParameters::WHITE_BALANCE_SHADE;
                 break;
             // Skipping values not mappable to v1 API
-            case ANDROID_CONTROL_AWB_OFF:
+            case ANDROID_CONTROL_AWB_MODE_OFF:
                 addComma = false;
                 break;
             default:
@@ -339,7 +339,7 @@
                 supportedWhiteBalance);
     }
 
-    effectMode = ANDROID_CONTROL_EFFECT_OFF;
+    effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
     params.set(CameraParameters::KEY_EFFECT,
             CameraParameters::EFFECT_NONE);
 
@@ -353,39 +353,39 @@
             if (addComma) supportedEffects += ",";
             addComma = true;
             switch (availableEffects.data.u8[i]) {
-                case ANDROID_CONTROL_EFFECT_OFF:
+                case ANDROID_CONTROL_EFFECT_MODE_OFF:
                     supportedEffects +=
                         CameraParameters::EFFECT_NONE;
                     break;
-                case ANDROID_CONTROL_EFFECT_MONO:
+                case ANDROID_CONTROL_EFFECT_MODE_MONO:
                     supportedEffects +=
                         CameraParameters::EFFECT_MONO;
                     break;
-                case ANDROID_CONTROL_EFFECT_NEGATIVE:
+                case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
                     supportedEffects +=
                         CameraParameters::EFFECT_NEGATIVE;
                     break;
-                case ANDROID_CONTROL_EFFECT_SOLARIZE:
+                case ANDROID_CONTROL_EFFECT_MODE_SOLARIZE:
                     supportedEffects +=
                         CameraParameters::EFFECT_SOLARIZE;
                     break;
-                case ANDROID_CONTROL_EFFECT_SEPIA:
+                case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
                     supportedEffects +=
                         CameraParameters::EFFECT_SEPIA;
                     break;
-                case ANDROID_CONTROL_EFFECT_POSTERIZE:
+                case ANDROID_CONTROL_EFFECT_MODE_POSTERIZE:
                     supportedEffects +=
                         CameraParameters::EFFECT_POSTERIZE;
                     break;
-                case ANDROID_CONTROL_EFFECT_WHITEBOARD:
+                case ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD:
                     supportedEffects +=
                         CameraParameters::EFFECT_WHITEBOARD;
                     break;
-                case ANDROID_CONTROL_EFFECT_BLACKBOARD:
+                case ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD:
                     supportedEffects +=
                         CameraParameters::EFFECT_BLACKBOARD;
                     break;
-                case ANDROID_CONTROL_EFFECT_AQUA:
+                case ANDROID_CONTROL_EFFECT_MODE_AQUA:
                     supportedEffects +=
                         CameraParameters::EFFECT_AQUA;
                     break;
@@ -399,7 +399,7 @@
         params.set(CameraParameters::KEY_SUPPORTED_EFFECTS, supportedEffects);
     }
 
-    antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_AUTO;
+    antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
     params.set(CameraParameters::KEY_ANTIBANDING,
             CameraParameters::ANTIBANDING_AUTO);
 
@@ -413,19 +413,19 @@
             if (addComma) supportedAntibanding += ",";
             addComma = true;
             switch (availableAntibandingModes.data.u8[i]) {
-                case ANDROID_CONTROL_AE_ANTIBANDING_OFF:
+                case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
                     supportedAntibanding +=
                         CameraParameters::ANTIBANDING_OFF;
                     break;
-                case ANDROID_CONTROL_AE_ANTIBANDING_50HZ:
+                case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
                     supportedAntibanding +=
                         CameraParameters::ANTIBANDING_50HZ;
                     break;
-                case ANDROID_CONTROL_AE_ANTIBANDING_60HZ:
+                case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
                     supportedAntibanding +=
                         CameraParameters::ANTIBANDING_60HZ;
                     break;
-                case ANDROID_CONTROL_AE_ANTIBANDING_AUTO:
+                case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
                     supportedAntibanding +=
                         CameraParameters::ANTIBANDING_AUTO;
                     break;
@@ -538,7 +538,7 @@
     }
 
     camera_metadata_ro_entry_t flashAvailable =
-        staticInfo(ANDROID_FLASH_AVAILABLE, 1, 1);
+        staticInfo(ANDROID_FLASH_INFO_AVAILABLE, 1, 1);
     if (!flashAvailable.count) return NO_INIT;
 
     camera_metadata_ro_entry_t availableAeModes =
@@ -557,7 +557,7 @@
             "," + CameraParameters::FLASH_MODE_TORCH;
         for (size_t i=0; i < availableAeModes.count; i++) {
             if (availableAeModes.data.u8[i] ==
-                    ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE) {
+                    ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
                 supportedFlashModes = supportedFlashModes + "," +
                     CameraParameters::FLASH_MODE_RED_EYE;
                 break;
@@ -574,7 +574,7 @@
     }
 
     camera_metadata_ro_entry_t minFocusDistance =
-        staticInfo(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE, 1, 1);
+        staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1, 1);
     if (!minFocusDistance.count) return NO_INIT;
 
     camera_metadata_ro_entry_t availableAfModes =
@@ -599,28 +599,28 @@
             if (addComma) supportedFocusModes += ",";
             addComma = true;
             switch (availableAfModes.data.u8[i]) {
-                case ANDROID_CONTROL_AF_AUTO:
+                case ANDROID_CONTROL_AF_MODE_AUTO:
                     supportedFocusModes +=
                         CameraParameters::FOCUS_MODE_AUTO;
                     break;
-                case ANDROID_CONTROL_AF_MACRO:
+                case ANDROID_CONTROL_AF_MODE_MACRO:
                     supportedFocusModes +=
                         CameraParameters::FOCUS_MODE_MACRO;
                     break;
-                case ANDROID_CONTROL_AF_CONTINUOUS_VIDEO:
+                case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                     supportedFocusModes +=
                         CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO;
                     break;
-                case ANDROID_CONTROL_AF_CONTINUOUS_PICTURE:
+                case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                     supportedFocusModes +=
                         CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
                     break;
-                case ANDROID_CONTROL_AF_EDOF:
+                case ANDROID_CONTROL_AF_MODE_EDOF:
                     supportedFocusModes +=
                         CameraParameters::FOCUS_MODE_EDOF;
                     break;
                 // Not supported in old API
-                case ANDROID_CONTROL_AF_OFF:
+                case ANDROID_CONTROL_AF_MODE_OFF:
                     addComma = false;
                     break;
                 default:
@@ -651,14 +651,14 @@
     focusingAreas.add(Parameters::Area(0,0,0,0,0));
 
     camera_metadata_ro_entry_t availableFocalLengths =
-        staticInfo(ANDROID_LENS_AVAILABLE_FOCAL_LENGTHS);
+        staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
     if (!availableFocalLengths.count) return NO_INIT;
 
     float minFocalLength = availableFocalLengths.data.f[0];
     params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, minFocalLength);
 
     camera_metadata_ro_entry_t sensorSize =
-        staticInfo(ANDROID_SENSOR_PHYSICAL_SIZE, 2, 2);
+        staticInfo(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 2, 2);
     if (!sensorSize.count) return NO_INIT;
 
     // The fields of view here assume infinity focus, maximum wide angle
@@ -674,7 +674,7 @@
                 exposureCompensation);
 
     camera_metadata_ro_entry_t exposureCompensationRange =
-        staticInfo(ANDROID_CONTROL_AE_EXP_COMPENSATION_RANGE, 2, 2);
+        staticInfo(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 2, 2);
     if (!exposureCompensationRange.count) return NO_INIT;
 
     params.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION,
@@ -683,7 +683,7 @@
             exposureCompensationRange.data.i32[0]);
 
     camera_metadata_ro_entry_t exposureCompensationStep =
-        staticInfo(ANDROID_CONTROL_AE_EXP_COMPENSATION_STEP, 1, 1);
+        staticInfo(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1, 1);
     if (!exposureCompensationStep.count) return NO_INIT;
 
     params.setFloat(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP,
@@ -713,7 +713,7 @@
     params.set(CameraParameters::KEY_MAX_ZOOM, NUM_ZOOM_STEPS - 1);
 
     camera_metadata_ro_entry_t maxDigitalZoom =
-        staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM, /*minCount*/1, /*maxCount*/1);
+        staticInfo(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, /*minCount*/1, /*maxCount*/1);
     if (!maxDigitalZoom.count) return NO_INIT;
 
     {
@@ -811,31 +811,31 @@
 status_t Parameters::buildFastInfo() {
 
     camera_metadata_ro_entry_t activeArraySize =
-        staticInfo(ANDROID_SENSOR_ACTIVE_ARRAY_SIZE, 2, 2);
+        staticInfo(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 2, 2);
     if (!activeArraySize.count) return NO_INIT;
     int32_t arrayWidth = activeArraySize.data.i32[0];
     int32_t arrayHeight = activeArraySize.data.i32[1];
 
     camera_metadata_ro_entry_t availableFaceDetectModes =
-        staticInfo(ANDROID_STATS_AVAILABLE_FACE_DETECT_MODES);
+        staticInfo(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
     if (!availableFaceDetectModes.count) return NO_INIT;
 
     uint8_t bestFaceDetectMode =
-        ANDROID_STATS_FACE_DETECTION_OFF;
+        ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
     for (size_t i = 0 ; i < availableFaceDetectModes.count; i++) {
         switch (availableFaceDetectModes.data.u8[i]) {
-            case ANDROID_STATS_FACE_DETECTION_OFF:
+            case ANDROID_STATISTICS_FACE_DETECT_MODE_OFF:
                 break;
-            case ANDROID_STATS_FACE_DETECTION_SIMPLE:
+            case ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE:
                 if (bestFaceDetectMode !=
-                        ANDROID_STATS_FACE_DETECTION_FULL) {
+                        ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                     bestFaceDetectMode =
-                        ANDROID_STATS_FACE_DETECTION_SIMPLE;
+                        ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE;
                 }
                 break;
-            case ANDROID_STATS_FACE_DETECTION_FULL:
+            case ANDROID_STATISTICS_FACE_DETECT_MODE_FULL:
                 bestFaceDetectMode =
-                    ANDROID_STATS_FACE_DETECTION_FULL;
+                    ANDROID_STATISTICS_FACE_DETECT_MODE_FULL;
                 break;
             default:
                 ALOGE("%s: Camera %d: Unknown face detect mode %d:",
@@ -846,7 +846,7 @@
     }
 
     camera_metadata_ro_entry_t maxFacesDetected =
-        staticInfo(ANDROID_STATS_MAX_FACE_COUNT, 1, 1);
+        staticInfo(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1, 1);
     if (!maxFacesDetected.count) return NO_INIT;
 
     int32_t maxFaces = maxFacesDetected.data.i32[0];
@@ -856,7 +856,7 @@
     camera_metadata_ro_entry_t sceneModeOverrides =
         staticInfo(ANDROID_CONTROL_SCENE_MODE_OVERRIDES);
     camera_metadata_ro_entry_t minFocusDistance =
-        staticInfo(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE);
+        staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
     bool fixedLens = (minFocusDistance.data.f[0] == 0);
 
     if (sceneModeOverrides.count > 0) {
@@ -877,16 +877,16 @@
             uint8_t aeMode =
                     sceneModeOverrides.data.u8[i * kModesPerSceneMode + 0];
             switch(aeMode) {
-                case ANDROID_CONTROL_AE_ON:
+                case ANDROID_CONTROL_AE_MODE_ON:
                     modes.flashMode = FLASH_MODE_OFF;
                     break;
-                case ANDROID_CONTROL_AE_ON_AUTO_FLASH:
+                case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
                     modes.flashMode = FLASH_MODE_AUTO;
                     break;
-                case ANDROID_CONTROL_AE_ON_ALWAYS_FLASH:
+                case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
                     modes.flashMode = FLASH_MODE_ON;
                     break;
-                case ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE:
+                case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
                     modes.flashMode = FLASH_MODE_RED_EYE;
                     break;
                 default:
@@ -900,15 +900,15 @@
             uint8_t afMode =
                     sceneModeOverrides.data.u8[i * kModesPerSceneMode + 2];
             switch(afMode) {
-                case ANDROID_CONTROL_AF_OFF:
+                case ANDROID_CONTROL_AF_MODE_OFF:
                     modes.focusMode = fixedLens ?
                             FOCUS_MODE_FIXED : FOCUS_MODE_INFINITY;
                     break;
-                case ANDROID_CONTROL_AF_AUTO:
-                case ANDROID_CONTROL_AF_MACRO:
-                case ANDROID_CONTROL_AF_CONTINUOUS_VIDEO:
-                case ANDROID_CONTROL_AF_CONTINUOUS_PICTURE:
-                case ANDROID_CONTROL_AF_EDOF:
+                case ANDROID_CONTROL_AF_MODE_AUTO:
+                case ANDROID_CONTROL_AF_MODE_MACRO:
+                case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
+                case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
+                case ANDROID_CONTROL_AF_MODE_EDOF:
                     modes.focusMode = static_cast<focusMode_t>(afMode);
                     break;
                 default:
@@ -1363,7 +1363,7 @@
 
     if (validatedParams.flashMode != flashMode) {
         camera_metadata_ro_entry_t flashAvailable =
-            staticInfo(ANDROID_FLASH_AVAILABLE, 1, 1);
+            staticInfo(ANDROID_FLASH_INFO_AVAILABLE, 1, 1);
         if (!flashAvailable.data.u8[0] &&
                 validatedParams.flashMode != Parameters::FLASH_MODE_OFF) {
             ALOGE("%s: Requested flash mode \"%s\" is not supported: "
@@ -1400,9 +1400,9 @@
                 fastInfo.sceneModeOverrides.
                         valueFor(validatedParams.sceneMode).wbMode;
     } else {
-        validatedParams.wbMode = ANDROID_CONTROL_AWB_OFF;
+        validatedParams.wbMode = ANDROID_CONTROL_AWB_MODE_OFF;
     }
-    if (validatedParams.wbMode == ANDROID_CONTROL_AWB_OFF) {
+    if (validatedParams.wbMode == ANDROID_CONTROL_AWB_MODE_OFF) {
         validatedParams.wbMode = wbModeStringToEnum(
             newParams.get(CameraParameters::KEY_WHITE_BALANCE) );
     }
@@ -1439,7 +1439,7 @@
         validatedParams.currentAfTriggerId = -1;
         if (validatedParams.focusMode != Parameters::FOCUS_MODE_FIXED) {
             camera_metadata_ro_entry_t minFocusDistance =
-                staticInfo(ANDROID_LENS_MINIMUM_FOCUS_DISTANCE);
+                staticInfo(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
             if (minFocusDistance.data.f[0] == 0) {
                 ALOGE("%s: Requested focus mode \"%s\" is not available: "
                         "fixed focus lens",
@@ -1489,7 +1489,7 @@
     validatedParams.exposureCompensation =
         newParams.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
     camera_metadata_ro_entry_t exposureCompensationRange =
-        staticInfo(ANDROID_CONTROL_AE_EXP_COMPENSATION_RANGE);
+        staticInfo(ANDROID_CONTROL_AE_COMPENSATION_RANGE);
     if ((validatedParams.exposureCompensation <
             exposureCompensationRange.data.i32[0]) ||
         (validatedParams.exposureCompensation >
@@ -1585,7 +1585,7 @@
     ATRACE_CALL();
     status_t res;
 
-    uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
+    uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
     res = request->update(ANDROID_REQUEST_METADATA_MODE,
             &metadataMode, 1);
     if (res != OK) return res;
@@ -1612,9 +1612,9 @@
     // to the other.
     bool sceneModeActive =
             sceneMode != (uint8_t)ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
-    uint8_t reqControlMode = ANDROID_CONTROL_AUTO;
+    uint8_t reqControlMode = ANDROID_CONTROL_MODE_AUTO;
     if (enableFaceDetect || sceneModeActive) {
-        reqControlMode = ANDROID_CONTROL_USE_SCENE_MODE;
+        reqControlMode = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
     }
     res = request->update(ANDROID_CONTROL_MODE,
             &reqControlMode, 1);
@@ -1628,21 +1628,21 @@
             &reqSceneMode, 1);
     if (res != OK) return res;
 
-    uint8_t reqFlashMode = ANDROID_FLASH_OFF;
-    uint8_t reqAeMode = ANDROID_CONTROL_AE_OFF;
+    uint8_t reqFlashMode = ANDROID_FLASH_MODE_OFF;
+    uint8_t reqAeMode = ANDROID_CONTROL_AE_MODE_OFF;
     switch (flashMode) {
         case Parameters::FLASH_MODE_OFF:
-            reqAeMode = ANDROID_CONTROL_AE_ON; break;
+            reqAeMode = ANDROID_CONTROL_AE_MODE_ON; break;
         case Parameters::FLASH_MODE_AUTO:
-            reqAeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH; break;
+            reqAeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH; break;
         case Parameters::FLASH_MODE_ON:
-            reqAeMode = ANDROID_CONTROL_AE_ON_ALWAYS_FLASH; break;
+            reqAeMode = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH; break;
         case Parameters::FLASH_MODE_TORCH:
-            reqAeMode = ANDROID_CONTROL_AE_ON;
-            reqFlashMode = ANDROID_FLASH_TORCH;
+            reqAeMode = ANDROID_CONTROL_AE_MODE_ON;
+            reqFlashMode = ANDROID_FLASH_MODE_TORCH;
             break;
         case Parameters::FLASH_MODE_RED_EYE:
-            reqAeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE; break;
+            reqAeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE; break;
         default:
             ALOGE("%s: Camera %d: Unknown flash mode %d", __FUNCTION__,
                     cameraId, flashMode);
@@ -1666,7 +1666,7 @@
     if (res != OK) return res;
 
     float reqFocusDistance = 0; // infinity focus in diopters
-    uint8_t reqFocusMode = ANDROID_CONTROL_AF_OFF;
+    uint8_t reqFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
     switch (focusMode) {
         case Parameters::FOCUS_MODE_AUTO:
         case Parameters::FOCUS_MODE_MACRO:
@@ -1677,7 +1677,7 @@
             break;
         case Parameters::FOCUS_MODE_INFINITY:
         case Parameters::FOCUS_MODE_FIXED:
-            reqFocusMode = ANDROID_CONTROL_AF_OFF;
+            reqFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
             break;
         default:
                 ALOGE("%s: Camera %d: Unknown focus mode %d", __FUNCTION__,
@@ -1716,7 +1716,7 @@
     if (res != OK) return res;
     delete[] reqFocusingAreas;
 
-    res = request->update(ANDROID_CONTROL_AE_EXP_COMPENSATION,
+    res = request->update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
             &exposureCompensation, 1);
     if (res != OK) return res;
 
@@ -1758,16 +1758,16 @@
     if (res != OK) return res;
 
     uint8_t reqVstabMode = videoStabilization ?
-            ANDROID_CONTROL_VIDEO_STABILIZATION_ON :
-            ANDROID_CONTROL_VIDEO_STABILIZATION_OFF;
+            ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON :
+            ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
     res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
             &reqVstabMode, 1);
     if (res != OK) return res;
 
     uint8_t reqFaceDetectMode = enableFaceDetect ?
             fastInfo.bestFaceDetectMode :
-            (uint8_t)ANDROID_STATS_FACE_DETECTION_OFF;
-    res = request->update(ANDROID_STATS_FACE_DETECT_MODE,
+            (uint8_t)ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+    res = request->update(ANDROID_STATISTICS_FACE_DETECT_MODE,
             &reqFaceDetectMode, 1);
     if (res != OK) return res;
 
@@ -1891,43 +1891,43 @@
 int Parameters::wbModeStringToEnum(const char *wbMode) {
     return
         !wbMode ?
-            ANDROID_CONTROL_AWB_AUTO :
+            ANDROID_CONTROL_AWB_MODE_AUTO :
         !strcmp(wbMode, CameraParameters::WHITE_BALANCE_AUTO) ?
-            ANDROID_CONTROL_AWB_AUTO :
+            ANDROID_CONTROL_AWB_MODE_AUTO :
         !strcmp(wbMode, CameraParameters::WHITE_BALANCE_INCANDESCENT) ?
-            ANDROID_CONTROL_AWB_INCANDESCENT :
+            ANDROID_CONTROL_AWB_MODE_INCANDESCENT :
         !strcmp(wbMode, CameraParameters::WHITE_BALANCE_FLUORESCENT) ?
-            ANDROID_CONTROL_AWB_FLUORESCENT :
+            ANDROID_CONTROL_AWB_MODE_FLUORESCENT :
         !strcmp(wbMode, CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT) ?
-            ANDROID_CONTROL_AWB_WARM_FLUORESCENT :
+            ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT :
         !strcmp(wbMode, CameraParameters::WHITE_BALANCE_DAYLIGHT) ?
-            ANDROID_CONTROL_AWB_DAYLIGHT :
+            ANDROID_CONTROL_AWB_MODE_DAYLIGHT :
         !strcmp(wbMode, CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT) ?
-            ANDROID_CONTROL_AWB_CLOUDY_DAYLIGHT :
+            ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT :
         !strcmp(wbMode, CameraParameters::WHITE_BALANCE_TWILIGHT) ?
-            ANDROID_CONTROL_AWB_TWILIGHT :
+            ANDROID_CONTROL_AWB_MODE_TWILIGHT :
         !strcmp(wbMode, CameraParameters::WHITE_BALANCE_SHADE) ?
-            ANDROID_CONTROL_AWB_SHADE :
+            ANDROID_CONTROL_AWB_MODE_SHADE :
         -1;
 }
 
 const char* Parameters::wbModeEnumToString(uint8_t wbMode) {
     switch (wbMode) {
-        case ANDROID_CONTROL_AWB_AUTO:
+        case ANDROID_CONTROL_AWB_MODE_AUTO:
             return CameraParameters::WHITE_BALANCE_AUTO;
-        case ANDROID_CONTROL_AWB_INCANDESCENT:
+        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
             return CameraParameters::WHITE_BALANCE_INCANDESCENT;
-        case ANDROID_CONTROL_AWB_FLUORESCENT:
+        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
             return CameraParameters::WHITE_BALANCE_FLUORESCENT;
-        case ANDROID_CONTROL_AWB_WARM_FLUORESCENT:
+        case ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT:
             return CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT;
-        case ANDROID_CONTROL_AWB_DAYLIGHT:
+        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
             return CameraParameters::WHITE_BALANCE_DAYLIGHT;
-        case ANDROID_CONTROL_AWB_CLOUDY_DAYLIGHT:
+        case ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT:
             return CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT;
-        case ANDROID_CONTROL_AWB_TWILIGHT:
+        case ANDROID_CONTROL_AWB_MODE_TWILIGHT:
             return CameraParameters::WHITE_BALANCE_TWILIGHT;
-        case ANDROID_CONTROL_AWB_SHADE:
+        case ANDROID_CONTROL_AWB_MODE_SHADE:
             return CameraParameters::WHITE_BALANCE_SHADE;
         default:
             ALOGE("%s: Unknown AWB mode enum: %d",
@@ -1939,40 +1939,40 @@
 int Parameters::effectModeStringToEnum(const char *effectMode) {
     return
         !effectMode ?
-            ANDROID_CONTROL_EFFECT_OFF :
+            ANDROID_CONTROL_EFFECT_MODE_OFF :
         !strcmp(effectMode, CameraParameters::EFFECT_NONE) ?
-            ANDROID_CONTROL_EFFECT_OFF :
+            ANDROID_CONTROL_EFFECT_MODE_OFF :
         !strcmp(effectMode, CameraParameters::EFFECT_MONO) ?
-            ANDROID_CONTROL_EFFECT_MONO :
+            ANDROID_CONTROL_EFFECT_MODE_MONO :
         !strcmp(effectMode, CameraParameters::EFFECT_NEGATIVE) ?
-            ANDROID_CONTROL_EFFECT_NEGATIVE :
+            ANDROID_CONTROL_EFFECT_MODE_NEGATIVE :
         !strcmp(effectMode, CameraParameters::EFFECT_SOLARIZE) ?
-            ANDROID_CONTROL_EFFECT_SOLARIZE :
+            ANDROID_CONTROL_EFFECT_MODE_SOLARIZE :
         !strcmp(effectMode, CameraParameters::EFFECT_SEPIA) ?
-            ANDROID_CONTROL_EFFECT_SEPIA :
+            ANDROID_CONTROL_EFFECT_MODE_SEPIA :
         !strcmp(effectMode, CameraParameters::EFFECT_POSTERIZE) ?
-            ANDROID_CONTROL_EFFECT_POSTERIZE :
+            ANDROID_CONTROL_EFFECT_MODE_POSTERIZE :
         !strcmp(effectMode, CameraParameters::EFFECT_WHITEBOARD) ?
-            ANDROID_CONTROL_EFFECT_WHITEBOARD :
+            ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD :
         !strcmp(effectMode, CameraParameters::EFFECT_BLACKBOARD) ?
-            ANDROID_CONTROL_EFFECT_BLACKBOARD :
+            ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD :
         !strcmp(effectMode, CameraParameters::EFFECT_AQUA) ?
-            ANDROID_CONTROL_EFFECT_AQUA :
+            ANDROID_CONTROL_EFFECT_MODE_AQUA :
         -1;
 }
 
 int Parameters::abModeStringToEnum(const char *abMode) {
     return
         !abMode ?
-            ANDROID_CONTROL_AE_ANTIBANDING_AUTO :
+            ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO :
         !strcmp(abMode, CameraParameters::ANTIBANDING_AUTO) ?
-            ANDROID_CONTROL_AE_ANTIBANDING_AUTO :
+            ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO :
         !strcmp(abMode, CameraParameters::ANTIBANDING_OFF) ?
-            ANDROID_CONTROL_AE_ANTIBANDING_OFF :
+            ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF :
         !strcmp(abMode, CameraParameters::ANTIBANDING_50HZ) ?
-            ANDROID_CONTROL_AE_ANTIBANDING_50HZ :
+            ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ :
         !strcmp(abMode, CameraParameters::ANTIBANDING_60HZ) ?
-            ANDROID_CONTROL_AE_ANTIBANDING_60HZ :
+            ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ :
         -1;
 }
 
@@ -2329,7 +2329,7 @@
     // chosen to maximize its area on the sensor
 
     camera_metadata_ro_entry_t maxDigitalZoom =
-            staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM);
+            staticInfo(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
     // For each zoom step by how many pixels more do we change the zoom
     float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) /
             (NUM_ZOOM_STEPS-1);
diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h
index 6d32bf6..4192e97 100644
--- a/services/camera/libcameraservice/camera2/Parameters.h
+++ b/services/camera/libcameraservice/camera2/Parameters.h
@@ -73,16 +73,16 @@
         FLASH_MODE_AUTO,
         FLASH_MODE_ON,
         FLASH_MODE_TORCH,
-        FLASH_MODE_RED_EYE = ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE,
+        FLASH_MODE_RED_EYE = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE,
         FLASH_MODE_INVALID = -1
     } flashMode;
 
     enum focusMode_t {
-        FOCUS_MODE_AUTO = ANDROID_CONTROL_AF_AUTO,
-        FOCUS_MODE_MACRO = ANDROID_CONTROL_AF_MACRO,
-        FOCUS_MODE_CONTINUOUS_VIDEO = ANDROID_CONTROL_AF_CONTINUOUS_VIDEO,
-        FOCUS_MODE_CONTINUOUS_PICTURE = ANDROID_CONTROL_AF_CONTINUOUS_PICTURE,
-        FOCUS_MODE_EDOF = ANDROID_CONTROL_AF_EDOF,
+        FOCUS_MODE_AUTO = ANDROID_CONTROL_AF_MODE_AUTO,
+        FOCUS_MODE_MACRO = ANDROID_CONTROL_AF_MODE_MACRO,
+        FOCUS_MODE_CONTINUOUS_VIDEO = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
+        FOCUS_MODE_CONTINUOUS_PICTURE = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
+        FOCUS_MODE_EDOF = ANDROID_CONTROL_AF_MODE_EDOF,
         FOCUS_MODE_INFINITY,
         FOCUS_MODE_FIXED,
         FOCUS_MODE_INVALID = -1
@@ -179,7 +179,7 @@
             focusMode_t focusMode;
             OverrideModes():
                     flashMode(FLASH_MODE_INVALID),
-                    wbMode(ANDROID_CONTROL_AWB_OFF),
+                    wbMode(ANDROID_CONTROL_AWB_MODE_OFF),
                     focusMode(FOCUS_MODE_INVALID) {
             }
         };