Merge "Warmup cycles must be in range and consecutive"
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
index e5e4e90..3dbf75e 100644
--- a/camera/CameraParameters.cpp
+++ b/camera/CameraParameters.cpp
@@ -526,7 +526,7 @@
         !strcmp(format, PIXEL_FORMAT_RGBA8888) ?
             HAL_PIXEL_FORMAT_RGBA_8888 :    // RGB8888
         !strcmp(format, PIXEL_FORMAT_BAYER_RGGB) ?
-            HAL_PIXEL_FORMAT_RAW_SENSOR :   // Raw sensor data
+            HAL_PIXEL_FORMAT_RAW16 :   // Raw sensor data
         -1;
 }
 
diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp
index fc3e437..a75cb48 100644
--- a/camera/ICameraService.cpp
+++ b/camera/ICameraService.cpp
@@ -209,6 +209,20 @@
         return status;
     }
 
+    virtual status_t setTorchMode(const String16& cameraId, bool enabled,
+            const sp<IBinder>& clientBinder)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
+        data.writeString16(cameraId);
+        data.writeInt32(enabled ? 1 : 0);
+        data.writeStrongBinder(clientBinder);
+        remote()->transact(BnCameraService::SET_TORCH_MODE, data, &reply);
+
+        if (readExceptionCode(reply)) return -EPROTO;
+        return reply.readInt32();
+    }
+
     // connect to camera service (pro client)
     virtual status_t connectPro(const sp<IProCameraCallbacks>& cameraCb, int cameraId,
                                 const String16 &clientPackageName, int clientUid,
@@ -490,6 +504,16 @@
             }
             return NO_ERROR;
         } break;
+        case SET_TORCH_MODE: {
+            CHECK_INTERFACE(ICameraService, data, reply);
+            String16 cameraId = data.readString16();
+            bool enabled = data.readInt32() != 0;
+            const sp<IBinder> clientBinder = data.readStrongBinder();
+            status_t status = setTorchMode(cameraId, enabled, clientBinder);
+            reply->writeNoException();
+            reply->writeInt32(status);
+            return NO_ERROR;
+        } break;
         default:
             return BBinder::onTransact(code, data, reply, flags);
     }
diff --git a/camera/ICameraServiceListener.cpp b/camera/ICameraServiceListener.cpp
index b2f1729..90a8bc2 100644
--- a/camera/ICameraServiceListener.cpp
+++ b/camera/ICameraServiceListener.cpp
@@ -29,6 +29,7 @@
 namespace {
     enum {
         STATUS_CHANGED = IBinder::FIRST_CALL_TRANSACTION,
+        TORCH_STATUS_CHANGED,
     };
 }; // namespace anonymous
 
@@ -54,8 +55,21 @@
                            data,
                            &reply,
                            IBinder::FLAG_ONEWAY);
+    }
 
-        reply.readExceptionCode();
+    virtual void onTorchStatusChanged(TorchStatus status, const String16 &cameraId)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(
+                              ICameraServiceListener::getInterfaceDescriptor());
+
+        data.writeInt32(static_cast<int32_t>(status));
+        data.writeString16(cameraId);
+
+        remote()->transact(TORCH_STATUS_CHANGED,
+                           data,
+                           &reply,
+                           IBinder::FLAG_ONEWAY);
     }
 };
 
@@ -75,7 +89,16 @@
             int32_t cameraId = data.readInt32();
 
             onStatusChanged(status, cameraId);
-            reply->writeNoException();
+
+            return NO_ERROR;
+        } break;
+        case TORCH_STATUS_CHANGED: {
+            CHECK_INTERFACE(ICameraServiceListener, data, reply);
+
+            TorchStatus status = static_cast<TorchStatus>(data.readInt32());
+            String16 cameraId = data.readString16();
+
+            onTorchStatusChanged(status, cameraId);
 
             return NO_ERROR;
         } break;
diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp
index 1f5867a..24b2327 100644
--- a/camera/tests/ProCameraTests.cpp
+++ b/camera/tests/ProCameraTests.cpp
@@ -89,6 +89,12 @@
         mCondition.broadcast();
     }
 
+    void onTorchStatusChanged(TorchStatus status, const String16& cameraId) {
+        dout << "On torch status changed: 0x" << std::hex
+             << (unsigned int) status << " cameraId " << cameraId.string()
+             << std::endl;
+    }
+
     status_t waitForStatusChange(Status& newStatus) {
         Mutex::Autolock al(mMutex);
 
@@ -469,7 +475,7 @@
         CMP_STR(NV16, YCbCr_422_SP);
         CMP_STR(NV21, YCrCb_420_SP);
         CMP_STR(YUY2, YCbCr_422_I);
-        CMP_STR(RAW,  RAW_SENSOR);
+        CMP_STR(RAW,  RAW16);
         CMP_STR(RGBA, RGBA_8888);
 
         std::cerr << "Unknown format string " << str << std::endl;
diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h
index f7f06bb..cc41efe 100644
--- a/include/camera/ICameraService.h
+++ b/include/camera/ICameraService.h
@@ -53,6 +53,7 @@
         GET_LEGACY_PARAMETERS,
         SUPPORTS_CAMERA_API,
         CONNECT_LEGACY,
+        SET_TORCH_MODE,
     };
 
     enum {
@@ -142,6 +143,12 @@
             int clientUid,
             /*out*/
             sp<ICamera>& device) = 0;
+
+    /**
+     * Turn on or off a camera's torch mode.
+     */
+    virtual status_t setTorchMode(const String16& cameraId, bool enabled,
+            const sp<IBinder>& clientBinder) = 0;
 };
 
 // ----------------------------------------------------------------------------
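For illustration, a minimal client-side sketch of the new call (hypothetical helper; assumes the camera service is registered as "media.camera" and that the usual defaultServiceManager()/interface_cast pattern is available):

    // Sketch only: enable the torch of camera "0" through the new binder call.
    #include <binder/Binder.h>
    #include <binder/IServiceManager.h>
    #include <camera/ICameraService.h>
    using namespace android;

    static status_t turnTorchOn() {
        sp<IBinder> binder =
                defaultServiceManager()->getService(String16("media.camera"));
        sp<ICameraService> cs = interface_cast<ICameraService>(binder);
        if (cs == NULL) return NO_INIT;
        // The client token lets the service notice if this client dies while
        // it still holds the torch.
        sp<BBinder> clientToken = new BBinder();
        return cs->setTorchMode(String16("0"), true /* enabled */, clientToken);
    }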
diff --git a/include/camera/ICameraServiceListener.h b/include/camera/ICameraServiceListener.h
index 0a0e43a..9e8b912 100644
--- a/include/camera/ICameraServiceListener.h
+++ b/include/camera/ICameraServiceListener.h
@@ -66,9 +66,33 @@
         STATUS_UNKNOWN          = 0xFFFFFFFF,
     };
 
+    /**
+     * The torch mode status of a camera.
+     *
+     * Initial status will be transmitted with onTorchStatusChanged immediately
+     * after this listener is added to the service listener list.
+     */
+    enum TorchStatus {
+        // The camera's torch mode has become available to use via
+        // setTorchMode().
+        TORCH_STATUS_AVAILABLE      = TORCH_MODE_STATUS_AVAILABLE,
+        // The camera's torch mode has become not available to use via
+        // setTorchMode().
+        TORCH_STATUS_NOT_AVAILABLE  = TORCH_MODE_STATUS_RESOURCE_BUSY,
+        // The camera's torch mode has been turned off by setTorchMode().
+        TORCH_STATUS_OFF            = TORCH_MODE_STATUS_OFF,
+        // The camera's torch mode has been turned on by setTorchMode().
+        TORCH_STATUS_ON             = 0x80000000,
+
+        // Use to initialize variables only
+        TORCH_STATUS_UNKNOWN        = 0xFFFFFFFF,
+    };
+
     DECLARE_META_INTERFACE(CameraServiceListener);
 
     virtual void onStatusChanged(Status status, int32_t cameraId) = 0;
+
+    virtual void onTorchStatusChanged(TorchStatus status, const String16& cameraId) = 0;
 };
 
 // ----------------------------------------------------------------------------
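A hedged sketch of a listener implementation, only to show how the two callbacks pair up; the class name and logging are made up, the real consumer lives in the Java framework:

    // Sketch only: a BnCameraServiceListener that logs torch transitions.
    #define LOG_TAG "TorchLogger"
    #include <camera/ICameraServiceListener.h>
    #include <utils/Log.h>
    #include <utils/String8.h>
    using namespace android;

    class TorchLogger : public BnCameraServiceListener {
    public:
        virtual void onStatusChanged(Status status, int32_t cameraId) {
            ALOGI("camera %d status 0x%x", cameraId, status);
        }
        virtual void onTorchStatusChanged(TorchStatus status,
                                          const String16& cameraId) {
            // AVAILABLE/NOT_AVAILABLE report whether setTorchMode() may be
            // called at all; ON/OFF report the result of a setTorchMode() call.
            ALOGI("camera %s torch 0x%x", String8(cameraId).string(), status);
        }
    };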
diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h
index db62cd5..4153c25 100644
--- a/include/media/IMediaPlayer.h
+++ b/include/media/IMediaPlayer.h
@@ -56,6 +56,7 @@
     virtual status_t        stop() = 0;
     virtual status_t        pause() = 0;
     virtual status_t        isPlaying(bool* state) = 0;
+    virtual status_t        setPlaybackRate(float rate) = 0;
     virtual status_t        seekTo(int msec) = 0;
     virtual status_t        getCurrentPosition(int* msec) = 0;
     virtual status_t        getDuration(int* msec) = 0;
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 4a6bf28..482b85f 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -156,6 +156,7 @@
     virtual status_t    stop() = 0;
     virtual status_t    pause() = 0;
     virtual bool        isPlaying() = 0;
+    virtual status_t    setPlaybackRate(float rate) { return INVALID_OPERATION; }
     virtual status_t    seekTo(int msec) = 0;
     virtual status_t    getCurrentPosition(int *msec) = 0;
     virtual status_t    getDuration(int *msec) = 0;
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index 5830933..808e893 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -220,6 +220,7 @@
             status_t        stop();
             status_t        pause();
             bool            isPlaying();
+            status_t        setPlaybackRate(float rate);
             status_t        getVideoWidth(int *w);
             status_t        getVideoHeight(int *h);
             status_t        seekTo(int msec);
@@ -274,6 +275,7 @@
     int                         mVideoWidth;
     int                         mVideoHeight;
     int                         mAudioSessionId;
+    float                       mPlaybackRate;
     float                       mSendLevel;
     struct sockaddr_in          mRetransmitEndpoint;
     bool                        mRetransmitEndpointValid;
diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h
index 899b324..a195fe8 100644
--- a/include/media/stagefright/MPEG4Writer.h
+++ b/include/media/stagefright/MPEG4Writer.h
@@ -26,6 +26,7 @@
 
 namespace android {
 
+class AMessage;
 class MediaBuffer;
 class MediaSource;
 class MetaData;
@@ -48,6 +49,7 @@
     virtual status_t dump(int fd, const Vector<String16>& args);
 
     void beginBox(const char *fourcc);
+    void beginBox(uint32_t id);
     void writeInt8(int8_t x);
     void writeInt16(int16_t x);
     void writeInt32(int32_t x);
@@ -62,6 +64,7 @@
     int32_t getTimeScale() const { return mTimeScale; }
 
     status_t setGeoData(int latitudex10000, int longitudex10000);
+    status_t setCaptureRate(float captureFps);
     virtual void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; }
     virtual int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; }
 
@@ -88,6 +91,7 @@
     off64_t mFreeBoxOffset;
     bool mStreamableFile;
     off64_t mEstimatedMoovBoxSize;
+    off64_t mMoovExtraSize;
     uint32_t mInterleaveDurationUs;
     int32_t mTimeScale;
     int64_t mStartTimestampUs;
@@ -102,6 +106,8 @@
 
     List<off64_t> mBoxes;
 
+    sp<AMessage> mMetaKeys;
+
     void setStartTimestampUs(int64_t timeUs);
     int64_t getStartTimestampUs();  // Not const
     status_t startTracks(MetaData *params);
@@ -195,6 +201,12 @@
     void writeGeoDataBox();
     void writeLatitude(int degreex10000);
     void writeLongitude(int degreex10000);
+
+    void addDeviceMeta();
+    void writeHdlr();
+    void writeKeys();
+    void writeIlst();
+    void writeMetaBox();
     void sendSessionSummary();
     void release();
     status_t reset();
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 7f3e5cc..dcd5670 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -39,6 +39,7 @@
     START,
     STOP,
     IS_PLAYING,
+    SET_PLAYBACK_RATE,
     PAUSE,
     SEEK_TO,
     GET_CURRENT_POSITION,
@@ -164,6 +165,15 @@
         return reply.readInt32();
     }
 
+    status_t setPlaybackRate(float rate)
+    {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+        data.writeFloat(rate);
+        remote()->transact(SET_PLAYBACK_RATE, data, &reply);
+        return reply.readInt32();
+    }
+
     status_t pause()
     {
         Parcel data, reply;
@@ -426,6 +436,11 @@
             reply->writeInt32(ret);
             return NO_ERROR;
         } break;
+        case SET_PLAYBACK_RATE: {
+            CHECK_INTERFACE(IMediaPlayer, data, reply);
+            reply->writeInt32(setPlaybackRate(data.readFloat()));
+            return NO_ERROR;
+        } break;
         case PAUSE: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
             reply->writeInt32(pause());
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 432ecda..d1d51cc 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -59,6 +59,7 @@
     mLoop = false;
     mLeftVolume = mRightVolume = 1.0;
     mVideoWidth = mVideoHeight = 0;
+    mPlaybackRate = 1.0;
     mLockThreadId = 0;
     mAudioSessionId = AudioSystem::newAudioUniqueId();
     AudioSystem::acquireAudioSessionId(mAudioSessionId, -1);
@@ -378,6 +379,24 @@
     return false;
 }
 
+status_t MediaPlayer::setPlaybackRate(float rate)
+{
+    ALOGV("setPlaybackRate: %f", rate);
+    if (rate <= 0.0) {
+        return BAD_VALUE;
+    }
+    Mutex::Autolock _l(mLock);
+    if (mPlayer != 0) {
+        if (mPlaybackRate == rate) {
+            return NO_ERROR;
+        }
+        mPlaybackRate = rate;
+        return mPlayer->setPlaybackRate(rate);
+    }
+    ALOGV("setPlaybackRate: no active player");
+    return INVALID_OPERATION;
+}
+
 status_t MediaPlayer::getVideoWidth(int *w)
 {
     ALOGV("getVideoWidth");
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 694f1a4..0b18ae0 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -961,6 +961,14 @@
     return NO_ERROR;
 }
 
+status_t MediaPlayerService::Client::setPlaybackRate(float rate)
+{
+    ALOGV("[%d] setPlaybackRate(%f)", mConnId, rate);
+    sp<MediaPlayerBase> p = getPlayer();
+    if (p == 0) return UNKNOWN_ERROR;
+    return p->setPlaybackRate(rate);
+}
+
 status_t MediaPlayerService::Client::getCurrentPosition(int *msec)
 {
     ALOGV("getCurrentPosition");
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index fad3447..7320311 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -261,6 +261,7 @@
         virtual status_t        stop();
         virtual status_t        pause();
         virtual status_t        isPlaying(bool* state);
+        virtual status_t        setPlaybackRate(float rate);
         virtual status_t        seekTo(int msec);
         virtual status_t        getCurrentPosition(int* msec);
         virtual status_t        getDuration(int* msec);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 2551040..55763f0 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -75,6 +75,7 @@
       mAudioSource(AUDIO_SOURCE_CNT),
       mVideoSource(VIDEO_SOURCE_LIST_END),
       mCaptureTimeLapse(false),
+      mCaptureFps(0.0f),
       mStarted(false) {
 
     ALOGV("Constructor");
@@ -263,6 +264,31 @@
     return OK;
 }
 
+// Attempt to parse a float literal optionally surrounded by whitespace,
+// returns true on success, false otherwise.
+static bool safe_strtof(const char *s, float *val) {
+    char *end;
+
+    // It is lame, but according to man page, we have to set errno to 0
+    // before calling strtof().
+    errno = 0;
+    *val = strtof(s, &end);
+
+    if (end == s || errno == ERANGE) {
+        return false;
+    }
+
+    // Skip trailing whitespace
+    while (isspace(*end)) {
+        ++end;
+    }
+
+    // For a successful return, the string must contain nothing but a valid
+    // float literal optionally surrounded by whitespace.
+
+    return *end == '\0';
+}
+
 // Attempt to parse an int64 literal optionally surrounded by whitespace,
 // returns true on success, false otherwise.
 static bool safe_strtoi64(const char *s, int64_t *val) {
@@ -546,8 +572,10 @@
     return OK;
 }
 
-status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs) {
-    ALOGV("setParamTimeBetweenTimeLapseFrameCapture: %lld us", timeUs);
+status_t StagefrightRecorder::setParamTimeLapseFps(float fps) {
+    ALOGV("setParamTimeLapseFps: %.2f", fps);
+
+    int64_t timeUs = (int64_t) (1000000.0 / fps + 0.5f);
 
     // Not allowing time more than a day
     if (timeUs <= 0 || timeUs > 86400*1E6) {
@@ -555,6 +583,7 @@
         return BAD_VALUE;
     }
 
+    mCaptureFps = fps;
     mTimeBetweenTimeLapseFrameCaptureUs = timeUs;
     return OK;
 }
@@ -682,11 +711,10 @@
         if (safe_strtoi32(value.string(), &timeLapseEnable)) {
             return setParamTimeLapseEnable(timeLapseEnable);
         }
-    } else if (key == "time-between-time-lapse-frame-capture") {
-        int64_t timeBetweenTimeLapseFrameCaptureUs;
-        if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureUs)) {
-            return setParamTimeBetweenTimeLapseFrameCapture(
-                    timeBetweenTimeLapseFrameCaptureUs);
+    } else if (key == "time-lapse-fps") {
+        float fps;
+        if (safe_strtof(value.string(), &fps)) {
+            return setParamTimeLapseFps(fps);
         }
     } else {
         ALOGE("setParameter: failed to find key %s", key.string());
@@ -1581,10 +1609,11 @@
 
     status_t err = OK;
     sp<MediaWriter> writer;
+    sp<MPEG4Writer> mp4writer;
     if (mOutputFormat == OUTPUT_FORMAT_WEBM) {
         writer = new WebmWriter(mOutputFd);
     } else {
-        writer = new MPEG4Writer(mOutputFd);
+        writer = mp4writer = new MPEG4Writer(mOutputFd);
     }
 
     if (mVideoSource < VIDEO_SOURCE_LIST_END) {
@@ -1617,13 +1646,15 @@
             mTotalBitRate += mAudioBitRate;
         }
 
+        if (mCaptureTimeLapse) {
+            mp4writer->setCaptureRate(mCaptureFps);
+        }
+
         if (mInterleaveDurationUs > 0) {
-            reinterpret_cast<MPEG4Writer *>(writer.get())->
-                setInterleaveDuration(mInterleaveDurationUs);
+            mp4writer->setInterleaveDuration(mInterleaveDurationUs);
         }
         if (mLongitudex10000 > -3600000 && mLatitudex10000 > -3600000) {
-            reinterpret_cast<MPEG4Writer *>(writer.get())->
-                setGeoData(mLatitudex10000, mLongitudex10000);
+            mp4writer->setGeoData(mLatitudex10000, mLongitudex10000);
         }
     }
     if (mMaxFileDurationUs != 0) {
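The recorder now takes a frame rate instead of a frame interval; setParamTimeLapseFps() converts it back with a rounded reciprocal. A worked example, using a hypothetical recorder instance and the usual ";"-separated key=value parameter syntax:

    // "time-lapse-fps=0.1" -> timeUs = (int64_t)(1000000.0 / 0.1 + 0.5f)
    //                                = 10,000,000 us (one frame every 10 s)
    // "time-lapse-fps=240" -> timeUs = 4,167 us
    // Intervals outside (0, 86400 s] are still rejected with BAD_VALUE.
    recorder->setParameters(String8("time-lapse-enable=1;time-lapse-fps=0.1"));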
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index b5a49d3..f34c229 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -109,6 +109,7 @@
     int32_t mTotalBitRate;
 
     bool mCaptureTimeLapse;
+    float mCaptureFps;
     int64_t mTimeBetweenTimeLapseFrameCaptureUs;
     sp<CameraSourceTimeLapse> mCameraSourceTimeLapse;
 
@@ -155,7 +156,7 @@
     status_t setParamAudioSamplingRate(int32_t sampleRate);
     status_t setParamAudioTimeScale(int32_t timeScale);
     status_t setParamTimeLapseEnable(int32_t timeLapseEnable);
-    status_t setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs);
+    status_t setParamTimeLapseFps(float fps);
     status_t setParamVideoEncodingBitRate(int32_t bitRate);
     status_t setParamVideoIFramesInterval(int32_t seconds);
     status_t setParamVideoEncoderProfile(int32_t profile);
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 9b446b8..cbc5e0d 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -130,23 +130,37 @@
 
 status_t NuPlayer::GenericSource::initFromDataSource() {
     sp<MediaExtractor> extractor;
+    String8 mimeType;
+    float confidence;
+    sp<AMessage> dummy;
+    bool isWidevineStreaming = false;
 
     CHECK(mDataSource != NULL);
 
     if (mIsWidevine) {
-        String8 mimeType;
-        float confidence;
-        sp<AMessage> dummy;
-        bool success;
-
-        success = SniffWVM(mDataSource, &mimeType, &confidence, &dummy);
-        if (!success
-                || strcasecmp(
+        isWidevineStreaming = SniffWVM(
+                mDataSource, &mimeType, &confidence, &dummy);
+        if (!isWidevineStreaming ||
+                strcasecmp(
                     mimeType.string(), MEDIA_MIMETYPE_CONTAINER_WVM)) {
             ALOGE("unsupported widevine mime: %s", mimeType.string());
             return UNKNOWN_ERROR;
         }
+    } else if (mIsStreaming) {
+        if (mSniffedMIME.empty()) {
+            if (!mDataSource->sniff(&mimeType, &confidence, &dummy)) {
+                return UNKNOWN_ERROR;
+            }
+            mSniffedMIME = mimeType.string();
+        }
+        isWidevineStreaming = !strcasecmp(
+                mSniffedMIME.c_str(), MEDIA_MIMETYPE_CONTAINER_WVM);
+    }
 
+    if (isWidevineStreaming) {
+        // we don't want cached source for widevine streaming.
+        mCachedSource.clear();
+        mDataSource = mHttpSource;
         mWVMExtractor = new WVMExtractor(mDataSource);
         mWVMExtractor->setAdaptiveStreamingMode(true);
         if (mUIDValid) {
@@ -181,14 +195,6 @@
             if (mFileMeta->findCString(kKeyMIMEType, &fileMime)
                     && !strncasecmp(fileMime, "video/wvm", 9)) {
                 mIsWidevine = true;
-                if (!mUri.empty()) {
-                  // streaming, but the app forgot to specify widevine:// url
-                  mWVMExtractor = static_cast<WVMExtractor *>(extractor.get());
-                  mWVMExtractor->setAdaptiveStreamingMode(true);
-                  if (mUIDValid) {
-                    mWVMExtractor->setUID(mUID);
-                  }
-                }
             }
         }
     }
@@ -656,10 +662,10 @@
     int32_t kbps = 0;
     status_t err = UNKNOWN_ERROR;
 
-    if (mCachedSource != NULL) {
-        err = mCachedSource->getEstimatedBandwidthKbps(&kbps);
-    } else if (mWVMExtractor != NULL) {
+    if (mWVMExtractor != NULL) {
         err = mWVMExtractor->getEstimatedBandwidthKbps(&kbps);
+    } else if (mCachedSource != NULL) {
+        err = mCachedSource->getEstimatedBandwidthKbps(&kbps);
     }
 
     if (err == OK) {
@@ -681,7 +687,13 @@
     int64_t cachedDurationUs = -1ll;
     ssize_t cachedDataRemaining = -1;
 
-    if (mCachedSource != NULL) {
+    ALOGW_IF(mWVMExtractor != NULL && mCachedSource != NULL,
+            "WVMExtractor and NuCachedSource both present");
+
+    if (mWVMExtractor != NULL) {
+        cachedDurationUs =
+                mWVMExtractor->getCachedDurationUs(&finalStatus);
+    } else if (mCachedSource != NULL) {
         cachedDataRemaining =
                 mCachedSource->approxDataRemaining(&finalStatus);
 
@@ -697,9 +709,6 @@
                 cachedDurationUs = cachedDataRemaining * 8000000ll / bitrate;
             }
         }
-    } else if (mWVMExtractor != NULL) {
-        cachedDurationUs
-            = mWVMExtractor->getCachedDurationUs(&finalStatus);
     }
 
     if (finalStatus != OK) {
diff --git a/media/libmediaplayerservice/nuplayer/MediaClock.cpp b/media/libmediaplayerservice/nuplayer/MediaClock.cpp
index 7bfff13..9152da1 100644
--- a/media/libmediaplayerservice/nuplayer/MediaClock.cpp
+++ b/media/libmediaplayerservice/nuplayer/MediaClock.cpp
@@ -20,19 +20,17 @@
 
 #include "MediaClock.h"
 
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 
 namespace android {
 
-// Maximum time change between two updates.
-static const int64_t kMaxAnchorFluctuationUs = 1000ll;
-
 MediaClock::MediaClock()
     : mAnchorTimeMediaUs(-1),
       mAnchorTimeRealUs(-1),
       mMaxTimeMediaUs(INT64_MAX),
       mStartingTimeMediaUs(-1),
-      mPaused(false) {
+      mPlaybackRate(1.0) {
 }
 
 MediaClock::~MediaClock() {
@@ -58,14 +56,14 @@
         return;
     }
 
+    Mutex::Autolock autoLock(mLock);
     int64_t nowUs = ALooper::GetNowUs();
-    int64_t nowMediaUs = anchorTimeMediaUs + nowUs - anchorTimeRealUs;
+    int64_t nowMediaUs =
+        anchorTimeMediaUs + (nowUs - anchorTimeRealUs) * (double)mPlaybackRate;
     if (nowMediaUs < 0) {
         ALOGW("reject anchor time since it leads to negative media time.");
         return;
     }
-
-    Mutex::Autolock autoLock(mLock);
     mAnchorTimeRealUs = nowUs;
     mAnchorTimeMediaUs = nowMediaUs;
     mMaxTimeMediaUs = maxTimeMediaUs;
@@ -76,60 +74,66 @@
     mMaxTimeMediaUs = maxTimeMediaUs;
 }
 
-void MediaClock::pause() {
+void MediaClock::setPlaybackRate(float rate) {
+    CHECK_GE(rate, 0.0);
     Mutex::Autolock autoLock(mLock);
-    if (mPaused) {
-        return;
-    }
-
-    mPaused = true;
     if (mAnchorTimeRealUs == -1) {
+        mPlaybackRate = rate;
         return;
     }
 
     int64_t nowUs = ALooper::GetNowUs();
-    mAnchorTimeMediaUs += nowUs - mAnchorTimeRealUs;
+    mAnchorTimeMediaUs += (nowUs - mAnchorTimeRealUs) * (double)mPlaybackRate;
     if (mAnchorTimeMediaUs < 0) {
-        ALOGW("anchor time should not be negative, set to 0.");
+        ALOGW("setRate: anchor time should not be negative, set to 0.");
         mAnchorTimeMediaUs = 0;
     }
     mAnchorTimeRealUs = nowUs;
+    mPlaybackRate = rate;
 }
 
-void MediaClock::resume() {
+status_t MediaClock::getMediaTime(
+        int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) {
     Mutex::Autolock autoLock(mLock);
-    if (!mPaused) {
-        return;
-    }
-
-    mPaused = false;
-    if (mAnchorTimeRealUs == -1) {
-        return;
-    }
-
-    mAnchorTimeRealUs = ALooper::GetNowUs();
+    return getMediaTime_l(realUs, outMediaUs, allowPastMaxTime);
 }
 
-int64_t MediaClock::getTimeMedia(int64_t realUs, bool allowPastMaxTime) {
-    Mutex::Autolock autoLock(mLock);
+status_t MediaClock::getMediaTime_l(
+        int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) {
     if (mAnchorTimeRealUs == -1) {
-        return -1ll;
+        return NO_INIT;
     }
 
-    if (mPaused) {
-        realUs = mAnchorTimeRealUs;
+    int64_t mediaUs = mAnchorTimeMediaUs
+            + (realUs - mAnchorTimeRealUs) * (double)mPlaybackRate;
+    if (mediaUs > mMaxTimeMediaUs && !allowPastMaxTime) {
+        mediaUs = mMaxTimeMediaUs;
     }
-    int64_t currentMediaUs = mAnchorTimeMediaUs + realUs - mAnchorTimeRealUs;
-    if (currentMediaUs > mMaxTimeMediaUs && !allowPastMaxTime) {
-        currentMediaUs = mMaxTimeMediaUs;
+    if (mediaUs < mStartingTimeMediaUs) {
+        mediaUs = mStartingTimeMediaUs;
     }
-    if (currentMediaUs < mStartingTimeMediaUs) {
-        currentMediaUs = mStartingTimeMediaUs;
+    if (mediaUs < 0) {
+        mediaUs = 0;
     }
-    if (currentMediaUs < 0) {
-        currentMediaUs = 0;
+    *outMediaUs = mediaUs;
+    return OK;
+}
+
+status_t MediaClock::getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs) {
+    Mutex::Autolock autoLock(mLock);
+    if (mPlaybackRate == 0.0) {
+        return NO_INIT;
     }
-    return currentMediaUs;
+
+    int64_t nowUs = ALooper::GetNowUs();
+    int64_t nowMediaUs;
+    status_t status =
+            getMediaTime_l(nowUs, &nowMediaUs, true /* allowPastMaxTime */);
+    if (status != OK) {
+        return status;
+    }
+    *outRealUs = (targetMediaUs - nowMediaUs) / (double)mPlaybackRate + nowUs;
+    return OK;
 }
 
 }  // namespace android
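The clock is now a simple linear map from real time to media time with slope mPlaybackRate; pause becomes setPlaybackRate(0.0) and resume restores the previous rate. A standalone sketch of the model (not the class itself):

    // mediaTime(realUs) = anchorMediaUs + (realUs - anchorRealUs) * rate
    static int64_t mediaTimeAt(int64_t realUs, int64_t anchorRealUs,
                               int64_t anchorMediaUs, double rate) {
        return anchorMediaUs + (int64_t)((realUs - anchorRealUs) * rate);
    }
    // Example: anchor at (real 100,000,000 us, media 5,000,000 us), rate 0.5:
    //   mediaTimeAt(100,400,000, 100,000,000, 5,000,000, 0.5) == 5,200,000 us
    // getRealTimeFor() inverts the same line:
    //   realUs = (targetMediaUs - nowMediaUs) / rate + nowUs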
diff --git a/media/libmediaplayerservice/nuplayer/MediaClock.h b/media/libmediaplayerservice/nuplayer/MediaClock.h
index d005993..660764f 100644
--- a/media/libmediaplayerservice/nuplayer/MediaClock.h
+++ b/media/libmediaplayerservice/nuplayer/MediaClock.h
@@ -32,9 +32,8 @@
     void setStartingTimeMedia(int64_t startingTimeMediaUs);
 
     void clearAnchor();
-    // It's highly recommended to use timestamp of just rendered frame as
-    // anchor time, especially in paused state. Such restriction will be
-    // required when dynamic playback rate is supported in the future.
+    // It's required to use the timestamp of the just-rendered frame as
+    // the anchor time in paused state.
     void updateAnchor(
         int64_t anchorTimeMediaUs,
         int64_t anchorTimeRealUs,
@@ -42,15 +41,25 @@
 
     void updateMaxTimeMedia(int64_t maxTimeMediaUs);
 
-    void pause();
-    void resume();
+    void setPlaybackRate(float rate);
 
-    int64_t getTimeMedia(int64_t realUs, bool allowPastMaxTime = false);
+    // query media time corresponding to real time |realUs|, and save the
+    // result in |outMediaUs|.
+    status_t getMediaTime(int64_t realUs,
+                          int64_t *outMediaUs,
+                          bool allowPastMaxTime = false);
+    // query real time corresponding to media time |targetMediaUs|.
+    // The result is saved in |outRealUs|.
+    status_t getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs);
 
 protected:
     virtual ~MediaClock();
 
 private:
+    status_t getMediaTime_l(int64_t realUs,
+                            int64_t *outMediaUs,
+                            bool allowPastMaxTime);
+
     Mutex mLock;
 
     int64_t mAnchorTimeMediaUs;
@@ -58,7 +67,7 @@
     int64_t mMaxTimeMediaUs;
     int64_t mStartingTimeMediaUs;
 
-    bool mPaused;
+    float mPlaybackRate;
 
     DISALLOW_EVIL_CONSTRUCTORS(MediaClock);
 };
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index fb8dbce..0d19fe9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -180,6 +180,7 @@
       mFlushingVideo(NONE),
       mResumePending(false),
       mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW),
+      mPlaybackRate(1.0),
       mStarted(false),
       mPaused(false),
       mPausedByClient(false) {
@@ -314,6 +315,12 @@
     (new AMessage(kWhatStart, id()))->post();
 }
 
+void NuPlayer::setPlaybackRate(float rate) {
+    sp<AMessage> msg = new AMessage(kWhatSetRate, id());
+    msg->setFloat("rate", rate);
+    msg->post();
+}
+
 void NuPlayer::pause() {
     (new AMessage(kWhatPause, id()))->post();
 }
@@ -604,6 +611,16 @@
             break;
         }
 
+        case kWhatSetRate:
+        {
+            ALOGV("kWhatSetRate");
+            CHECK(msg->findFloat("rate", &mPlaybackRate));
+            if (mRenderer != NULL) {
+                mRenderer->setPlaybackRate(mPlaybackRate);
+            }
+            break;
+        }
+
         case kWhatScanSources:
         {
             int32_t generation;
@@ -1048,6 +1065,9 @@
     ++mRendererGeneration;
     notify->setInt32("generation", mRendererGeneration);
     mRenderer = new Renderer(mAudioSink, notify, flags);
+    if (mPlaybackRate != 1.0) {
+        mRenderer->setPlaybackRate(mPlaybackRate);
+    }
 
     mRendererLooper = new ALooper;
     mRendererLooper->setName("NuPlayerRenderer");
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 57eaf74..a2cb53e 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -51,6 +51,7 @@
             const sp<IGraphicBufferProducer> &bufferProducer);
 
     void setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink);
+    void setPlaybackRate(float rate);
     void start();
 
     void pause();
@@ -104,6 +105,7 @@
         kWhatSetVideoNativeWindow       = '=NaW',
         kWhatSetAudioSink               = '=AuS',
         kWhatMoreDataQueued             = 'more',
+        kWhatSetRate                    = 'setR',
         kWhatStart                      = 'strt',
         kWhatScanSources                = 'scan',
         kWhatVideoNotify                = 'vidN',
@@ -175,6 +177,7 @@
 
     int32_t mVideoScalingMode;
 
+    float mPlaybackRate;
     bool mStarted;
 
     // Actual pause state, either as requested by client or due to buffering.
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index abfa4d3..5887e50 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -341,6 +341,11 @@
     return mState == STATE_RUNNING && !mAtEOS;
 }
 
+status_t NuPlayerDriver::setPlaybackRate(float rate) {
+    mPlayer->setPlaybackRate(rate);
+    return OK;
+}
+
 status_t NuPlayerDriver::seekTo(int msec) {
     ALOGD("seekTo(%p) %d ms", this, msec);
     Mutex::Autolock autoLock(mLock);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index 5cba7d9..e53abcd 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -47,6 +47,7 @@
     virtual status_t stop();
     virtual status_t pause();
     virtual bool isPlaying();
+    virtual status_t setPlaybackRate(float rate);
     virtual status_t seekTo(int msec);
     virtual status_t getCurrentPosition(int *msec);
     virtual status_t getDuration(int *msec);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 7f8680d..d21884b 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -67,6 +67,7 @@
       mVideoQueueGeneration(0),
       mAudioDrainGeneration(0),
       mVideoDrainGeneration(0),
+      mPlaybackRate(1.0),
       mAudioFirstAnchorTimeMediaUs(-1),
       mAnchorTimeMediaUs(-1),
       mAnchorNumFramesWritten(-1),
@@ -121,6 +122,12 @@
     msg->post();
 }
 
+void NuPlayer::Renderer::setPlaybackRate(float rate) {
+    sp<AMessage> msg = new AMessage(kWhatSetRate, id());
+    msg->setFloat("rate", rate);
+    msg->post();
+}
+
 void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
     {
         Mutex::Autolock autoLock(mLock);
@@ -172,12 +179,7 @@
 
 // Called on any threads.
 status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
-    int64_t currentTimeUs = mMediaClock->getTimeMedia(ALooper::GetNowUs());
-    if (currentTimeUs == -1) {
-        return NO_INIT;
-    }
-    *mediaUs = currentTimeUs;
-    return OK;
+    return mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
 }
 
 void NuPlayer::Renderer::clearAudioFirstAnchorTime_l() {
@@ -361,6 +363,16 @@
             break;
         }
 
+        case kWhatSetRate:
+        {
+            CHECK(msg->findFloat("rate", &mPlaybackRate));
+            int32_t ratePermille = (int32_t)(0.5f + 1000 * mPlaybackRate);
+            mPlaybackRate = ratePermille / 1000.0f;
+            mMediaClock->setPlaybackRate(mPlaybackRate);
+            mAudioSink->setPlaybackRatePermille(ratePermille);
+            break;
+        }
+
         case kWhatFlush:
         {
             onFlush(msg);
@@ -541,10 +553,10 @@
 
     if (mAudioFirstAnchorTimeMediaUs >= 0) {
         int64_t nowUs = ALooper::GetNowUs();
+        int64_t nowMediaUs =
+            mAudioFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs);
         // we don't know how much data we are queueing for offloaded tracks.
-        mMediaClock->updateAnchor(mAudioFirstAnchorTimeMediaUs,
-                                  nowUs - getPlayedOutAudioDurationUs(nowUs),
-                                  INT64_MAX);
+        mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX);
     }
 
     if (hasEOS) {
@@ -670,21 +682,27 @@
     return !mAudioQueue.empty();
 }
 
+int64_t NuPlayer::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
+    int32_t sampleRate = offloadingAudio() ?
+            mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate;
+    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
+    return (int64_t)((int32_t)numFrames * 1000000LL / sampleRate);
+}
+
+// Calculate duration of pending samples if played at normal rate (i.e., 1.0).
 int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
-    int64_t writtenAudioDurationUs =
-        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
+    int64_t writtenAudioDurationUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
     return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
 }
 
 int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
-    int64_t currentPositionUs =
-            mMediaClock->getTimeMedia(nowUs, true /* allowPastMaxTime */);
-    if (currentPositionUs == -1) {
+    int64_t realUs;
+    if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
         // If failed to get current position, e.g. due to audio clock is
         // not ready, then just play out video immediately without delay.
         return nowUs;
     }
-    return (mediaTimeUs - currentPositionUs) + nowUs;
+    return realUs;
 }
 
 void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
@@ -696,9 +714,8 @@
     }
     setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
     int64_t nowUs = ALooper::GetNowUs();
-    mMediaClock->updateAnchor(mediaTimeUs,
-                              nowUs + getPendingAudioPlayoutDurationUs(nowUs),
-                              mediaTimeUs);
+    int64_t nowMediaUs = mediaTimeUs - getPendingAudioPlayoutDurationUs(nowUs);
+    mMediaClock->updateAnchor(nowMediaUs, nowUs, mediaTimeUs);
     mAnchorTimeMediaUs = mediaTimeUs;
 }
 
@@ -828,9 +845,11 @@
             ALOGV("video late by %lld us (%.2f secs)",
                  mVideoLateByUs, mVideoLateByUs / 1E6);
         } else {
+            int64_t mediaUs = 0;
+            mMediaClock->getMediaTime(realTimeUs, &mediaUs);
             ALOGV("rendering video at media time %.2f secs",
                     (mFlags & FLAG_REAL_TIME ? realTimeUs :
-                    mMediaClock->getTimeMedia(realTimeUs)) / 1E6);
+                    mediaUs) / 1E6);
         }
     } else {
         setVideoLateByUs(0);
@@ -1153,7 +1172,7 @@
         ++mVideoDrainGeneration;
         prepareForMediaRenderingStart_l();
         mPaused = true;
-        mMediaClock->pause();
+        mMediaClock->setPlaybackRate(0.0);
     }
 
     mDrainAudioQueuePending = false;
@@ -1181,7 +1200,7 @@
     {
         Mutex::Autolock autoLock(mLock);
         mPaused = false;
-        mMediaClock->resume();
+        mMediaClock->setPlaybackRate(mPlaybackRate);
 
         if (!mAudioQueue.empty()) {
             postDrainAudioQueue_l();
@@ -1222,6 +1241,7 @@
 // accessing getTimestamp() or getPosition() every time a data buffer with
 // a media time is received.
 //
+// Calculate duration of played samples if played at normal rate (i.e., 1.0).
 int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
     uint32_t numFramesPlayed;
     int64_t numFramesPlayedAt;
@@ -1259,9 +1279,8 @@
         //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
     }
 
-    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
     //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
-    int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame())
+    int64_t durationUs = getDurationUsIfPlayedAtSampleRate(numFramesPlayed)
             + nowUs - numFramesPlayedAt;
     if (durationUs < 0) {
         // Occurs when numFramesPlayed position is very small and the following:
@@ -1400,6 +1419,10 @@
                     &offloadInfo);
 
             if (err == OK) {
+                if (mPlaybackRate != 1.0) {
+                    mAudioSink->setPlaybackRatePermille(
+                            (int32_t)(mPlaybackRate * 1000 + 0.5f));
+                }
                 // If the playback is offloaded to h/w, we pass
                 // the HAL some metadata information.
                 // We don't want to do this for PCM because it
@@ -1455,6 +1478,10 @@
             return err;
         }
         mCurrentPcmInfo = info;
+        if (mPlaybackRate != 1.0) {
+            mAudioSink->setPlaybackRatePermille(
+                    (int32_t)(mPlaybackRate * 1000 + 0.5f));
+        }
         mAudioSink->start();
     }
     if (audioSinkChanged) {
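Two conversions above are worth spelling out: the requested rate is quantized to permille before reaching the AudioSink (and the quantized value is what the MediaClock sees), and audio durations are now derived from the track sample rate rather than msecsPerFrame(). A standalone sketch of both:

    static int32_t toPermille(float rate) {
        // e.g. 1.2345f -> 1235; the renderer then stores 1235 / 1000.0f so
        // the clock and the sink agree on the effective rate.
        return (int32_t)(0.5f + 1000 * rate);
    }

    static int64_t durationAtSampleRate(uint32_t numFrames, int32_t sampleRate) {
        // 48,000 frames at 48 kHz is exactly 1,000,000 us at 1.0x playback.
        return (int64_t)numFrames * 1000000LL / sampleRate;
    }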
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index faf3b3f..38843d5 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -48,6 +48,8 @@
 
     void queueEOS(bool audio, status_t finalResult);
 
+    void setPlaybackRate(float rate);
+
     void flush(bool audio, bool notifyComplete);
 
     void signalTimeDiscontinuity();
@@ -100,6 +102,7 @@
         kWhatPostDrainVideoQueue = 'pDVQ',
         kWhatQueueBuffer         = 'queB',
         kWhatQueueEOS            = 'qEOS',
+        kWhatSetRate             = 'setR',
         kWhatFlush               = 'flus',
         kWhatPause               = 'paus',
         kWhatResume              = 'resm',
@@ -138,6 +141,7 @@
     int32_t mVideoDrainGeneration;
 
     sp<MediaClock> mMediaClock;
+    float mPlaybackRate;
     int64_t mAudioFirstAnchorTimeMediaUs;
     int64_t mAnchorTimeMediaUs;
     int64_t mAnchorNumFramesWritten;
@@ -243,6 +247,8 @@
     void startAudioOffloadPauseTimeout();
     void cancelAudioOffloadPauseTimeout();
 
+    int64_t getDurationUsIfPlayedAtSampleRate(uint32_t numFrames);
+
     DISALLOW_EVIL_CONSTRUCTORS(Renderer);
 };
 
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index beb6f20..6f6e362 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -29,6 +29,7 @@
 #include <utils/Log.h>
 
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/MPEG4Writer.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MetaData.h>
@@ -62,6 +63,14 @@
 static const uint8_t kNalUnitTypePicParamSet = 0x08;
 static const int64_t kInitialDelayTimeUs     = 700000LL;
 
+static const char kMetaKey_Model[]      = "com.android.model";
+static const char kMetaKey_Version[]    = "com.android.version";
+static const char kMetaKey_Build[]      = "com.android.build";
+static const char kMetaKey_CaptureFps[] = "com.android.capture.fps";
+
+/* uncomment to include model and build in meta */
+//#define SHOW_MODEL_BUILD 1
+
 class MPEG4Writer::Track {
 public:
     Track(MPEG4Writer *owner, const sp<MediaSource> &source, size_t trackId);
@@ -358,11 +367,14 @@
       mOffset(0),
       mMdatOffset(0),
       mEstimatedMoovBoxSize(0),
+      mMoovExtraSize(0),
       mInterleaveDurationUs(1000000),
       mLatitudex10000(0),
       mLongitudex10000(0),
       mAreGeoTagsAvailable(false),
+      mMetaKeys(new AMessage()),
       mStartTimeOffsetMs(-1) {
+    addDeviceMeta();
 }
 
 MPEG4Writer::~MPEG4Writer() {
@@ -482,6 +494,34 @@
     return OK;
 }
 
+void MPEG4Writer::addDeviceMeta() {
+    // add device info and estimate space in 'moov'
+    char val[PROPERTY_VALUE_MAX];
+    size_t n;
+    // meta size is estimated by adding up the following:
+    // - meta header structures, which occur only once (total 66 bytes)
+    // - size for each key, which consists of a fixed header (32 bytes),
+    //   plus key length and data length.
+    mMoovExtraSize += 66;
+    if (property_get("ro.build.version.release", val, NULL)
+            && (n = strlen(val)) > 0) {
+        mMetaKeys->setString(kMetaKey_Version, val, n + 1);
+        mMoovExtraSize += sizeof(kMetaKey_Version) + n + 32;
+    }
+#ifdef SHOW_MODEL_BUILD
+    if (property_get("ro.product.model", val, NULL)
+            && (n = strlen(val)) > 0) {
+        mMetaKeys->setString(kMetaKey_Model, val, n + 1);
+        mMoovExtraSize += sizeof(kMetaKey_Model) + n + 32;
+    }
+    if (property_get("ro.build.display.id", val, NULL)
+            && (n = strlen(val)) > 0) {
+        mMetaKeys->setString(kMetaKey_Build, val, n + 1);
+        mMoovExtraSize += sizeof(kMetaKey_Build) + n + 32;
+    }
+#endif
+}
+
 int64_t MPEG4Writer::estimateMoovBoxSize(int32_t bitRate) {
     // This implementation is highly experimental/heuristic.
     //
@@ -535,6 +575,9 @@
         size = MAX_MOOV_BOX_SIZE;
     }
 
+    // Account for the extra stuff (Geo, meta keys, etc.)
+    size += mMoovExtraSize;
+
     ALOGI("limits: %" PRId64 "/%" PRId64 " bytes/us, bit rate: %d bps and the"
          " estimated moov size %" PRId64 " bytes",
          mMaxFileSizeLimitBytes, mMaxFileDurationLimitUs, bitRate, size);
@@ -946,6 +989,7 @@
     if (mAreGeoTagsAvailable) {
         writeUdtaBox();
     }
+    writeMetaBox();
     int32_t id = 1;
     for (List<Track *>::iterator it = mTracks.begin();
         it != mTracks.end(); ++it, ++id) {
@@ -1115,6 +1159,14 @@
     return bytes;
 }
 
+void MPEG4Writer::beginBox(uint32_t id) {
+    mBoxes.push_back(mWriteMoovBoxToMemory?
+            mMoovBoxBufferOffset: mOffset);
+
+    writeInt32(0);
+    writeInt32(id);
+}
+
 void MPEG4Writer::beginBox(const char *fourcc) {
     CHECK_EQ(strlen(fourcc), 4);
 
@@ -1239,6 +1291,18 @@
     mLatitudex10000 = latitudex10000;
     mLongitudex10000 = longitudex10000;
     mAreGeoTagsAvailable = true;
+    mMoovExtraSize += 30;
+    return OK;
+}
+
+status_t MPEG4Writer::setCaptureRate(float captureFps) {
+    if (captureFps <= 0.0f) {
+        return BAD_VALUE;
+    }
+
+    mMetaKeys->setFloat(kMetaKey_CaptureFps, captureFps);
+    mMoovExtraSize += sizeof(kMetaKey_CaptureFps) + 4 + 32;
+
     return OK;
 }
 
@@ -3070,6 +3134,103 @@
     endBox();
 }
 
+void MPEG4Writer::writeHdlr() {
+    beginBox("hdlr");
+    writeInt32(0); // Version, Flags
+    writeInt32(0); // Predefined
+    writeFourcc("mdta");
+    writeInt32(0); // Reserved[0]
+    writeInt32(0); // Reserved[1]
+    writeInt32(0); // Reserved[2]
+    writeInt8(0);  // Name (empty)
+    endBox();
+}
+
+void MPEG4Writer::writeKeys() {
+    size_t count = mMetaKeys->countEntries();
+
+    beginBox("keys");
+    writeInt32(0);     // Version, Flags
+    writeInt32(count); // Entry_count
+    for (size_t i = 0; i < count; i++) {
+        AMessage::Type type;
+        const char *key = mMetaKeys->getEntryNameAt(i, &type);
+        size_t n = strlen(key);
+        writeInt32(n + 8);
+        writeFourcc("mdta");
+        write(key, n); // write without the \0
+    }
+    endBox();
+}
+
+void MPEG4Writer::writeIlst() {
+    size_t count = mMetaKeys->countEntries();
+
+    beginBox("ilst");
+    for (size_t i = 0; i < count; i++) {
+        beginBox(i + 1); // key id (1-based)
+        beginBox("data");
+        AMessage::Type type;
+        const char *key = mMetaKeys->getEntryNameAt(i, &type);
+        switch (type) {
+            case AMessage::kTypeString:
+            {
+                AString val;
+                CHECK(mMetaKeys->findString(key, &val));
+                writeInt32(1); // type = UTF8
+                writeInt32(0); // default country/language
+                write(val.c_str(), strlen(val.c_str())); // write without \0
+                break;
+            }
+
+            case AMessage::kTypeFloat:
+            {
+                float val;
+                CHECK(mMetaKeys->findFloat(key, &val));
+                writeInt32(23); // type = float32
+                writeInt32(0);  // default country/language
+                writeInt32(*reinterpret_cast<int32_t *>(&val));
+                break;
+            }
+
+            case AMessage::kTypeInt32:
+            {
+                int32_t val;
+                CHECK(mMetaKeys->findInt32(key, &val));
+                writeInt32(67); // type = signed int32
+                writeInt32(0);  // default country/language
+                writeInt32(val);
+                break;
+            }
+
+            default:
+            {
+                ALOGW("Unsupported key type, writing 0 instead");
+                writeInt32(77); // type = unsigned int32
+                writeInt32(0);  // default country/language
+                writeInt32(0);
+                break;
+            }
+        }
+        endBox(); // data
+        endBox(); // key id
+    }
+    endBox(); // ilst
+}
+
+void MPEG4Writer::writeMetaBox() {
+    size_t count = mMetaKeys->countEntries();
+    if (count == 0) {
+        return;
+    }
+
+    beginBox("meta");
+    writeHdlr();
+    writeKeys();
+    writeIlst();
+    endBox();
+}
+
 /*
  * Geodata is stored according to ISO-6709 standard.
  */
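The mMoovExtraSize accounting above can be checked by hand: the fixed meta/hdlr/keys/ilst headers are budgeted at 66 bytes, and each key adds a 32-byte fixed overhead plus its key and value lengths. For example, assuming a build release string of "5.0":

    //   fixed headers                                               66 bytes
    //   "com.android.capture.fps": sizeof(key) 24 + 4 + 32          60 bytes
    //   "com.android.version":     sizeof(key) 20 + strlen("5.0") 3
    //                              + 32                             55 bytes
    // The sum is folded into estimateMoovBoxSize(), so an undercount here
    // would risk overrunning the space reserved by the free box.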
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
index 4debc48..bd01a1a 100644
--- a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
+++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
@@ -34,6 +34,9 @@
     params->nVersion.s.nStep = 0;
 }
 
+// Microsoft WAV GSM encoding packs two GSM frames into 65 bytes.
+static const int kMSGSMFrameSize = 65;
+
 SoftGSM::SoftGSM(
         const char *name,
         const OMX_CALLBACKTYPE *callbacks,
@@ -64,7 +67,7 @@
     def.eDir = OMX_DirInput;
     def.nBufferCountMin = kNumBuffers;
     def.nBufferCountActual = def.nBufferCountMin;
-    def.nBufferSize = sizeof(gsm_frame);
+    def.nBufferSize = 1024 / kMSGSMFrameSize * kMSGSMFrameSize;
     def.bEnabled = OMX_TRUE;
     def.bPopulated = OMX_FALSE;
     def.eDomain = OMX_PortDomainAudio;
@@ -207,8 +210,8 @@
             mSignalledError = true;
         }
 
-        if(((inHeader->nFilledLen / 65) * 65) != inHeader->nFilledLen) {
-            ALOGE("input buffer not multiple of 65 (%d).", inHeader->nFilledLen);
+        if(((inHeader->nFilledLen / kMSGSMFrameSize) * kMSGSMFrameSize) != inHeader->nFilledLen) {
+            ALOGE("input buffer not multiple of %d (%d).", kMSGSMFrameSize, inHeader->nFilledLen);
             notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
             mSignalledError = true;
         }
@@ -258,6 +261,25 @@
     return ret;
 }
 
+void SoftGSM::onPortFlushCompleted(OMX_U32 portIndex) {
+    if (portIndex == 0) {
+        gsm_destroy(mGsm);
+        mGsm = gsm_create();
+        int msopt = 1;
+        gsm_option(mGsm, GSM_OPT_WAV49, &msopt);
+    }
+}
+
+void SoftGSM::onReset() {
+    gsm_destroy(mGsm);
+    mGsm = gsm_create();
+    int msopt = 1;
+    gsm_option(mGsm, GSM_OPT_WAV49, &msopt);
+    mSignalledError = false;
+}
+
+
+
 
 }  // namespace android
 
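The numbers behind the new constant: WAV49 (Microsoft) GSM packs two 160-sample GSM frames into one 65-byte block, so input must arrive in whole 65-byte multiples, and the port buffer size is rounded down to the largest such multiple that fits in 1 KiB:

    //   1024 / 65 * 65 = 975 bytes  (15 MS-GSM blocks per buffer)
    //   each 65-byte block decodes to 2 * 160 = 320 16-bit PCM samples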
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.h b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
index 8ab6116..0303dea 100644
--- a/media/libstagefright/codecs/gsm/dec/SoftGSM.h
+++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
@@ -43,6 +43,9 @@
 
     virtual void onQueueFilled(OMX_U32 portIndex);
 
+    virtual void onPortFlushCompleted(OMX_U32 portIndex);
+    virtual void onReset();
+
 private:
     enum {
         kNumBuffers = 4,
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index dfb5e59..2d3a25a 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -33,7 +33,6 @@
 struct LiveDataSource;
 struct M3UParser;
 struct PlaylistFetcher;
-struct Parcel;
 
 struct LiveSession : public AHandler {
     enum Flags {
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index bf59460..4779d6a 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -158,7 +158,7 @@
     switch (portIndex) {
         case kPortIndexInput:  return "Input";
         case kPortIndexOutput: return "Output";
-        case ~0:               return "All";
+        case ~0U:              return "All";
         default:               return "port";
     }
 }
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 9ad437a..f3780a9 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -826,14 +826,20 @@
     if (ret != NO_ERROR) {
         return false;
     }
-
+    bool mute = true;
     bool state = AUDIO_MODE_INVALID;
     AutoMutex lock(mHardwareLock);
-    audio_hw_device_t *dev = mPrimaryHardwareDev->hwDevice();
     mHardwareStatus = AUDIO_HW_GET_MIC_MUTE;
-    dev->get_mic_mute(dev, &state);
+    for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
+        audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice();
+        status_t result = dev->get_mic_mute(dev, &state);
+        if (result == NO_ERROR) {
+            mute = mute && state;
+        }
+    }
     mHardwareStatus = AUDIO_HW_IDLE;
-    return state;
+
+    return mute;
 }
 
 status_t AudioFlinger::setMasterMute(bool muted)
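getMicMute() now aggregates across every audio HW device instead of asking only the primary one: the mic is reported as muted only when all devices that answer get_mic_mute() report muted, and devices returning an error are skipped. In table form:

    //   dev0 muted,  dev1 unmuted -> getMicMute() == false
    //   dev0 muted,  dev1 muted   -> getMicMute() == true
    //   no device answers         -> getMicMute() == true (initial value)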
diff --git a/services/audioflinger/AudioResamplerFirProcessNeon.h b/services/audioflinger/AudioResamplerFirProcessNeon.h
index f311cef..d4fa7ad 100644
--- a/services/audioflinger/AudioResamplerFirProcessNeon.h
+++ b/services/audioflinger/AudioResamplerFirProcessNeon.h
@@ -24,10 +24,6 @@
 #if USE_NEON
 //
 // NEON specializations are enabled for Process() and ProcessL()
-//
-// TODO: Stride 16 and Stride 8 can be combined with one pass stride 8 (if necessary)
-// and looping stride 16 (or vice versa). This has some polyphase coef data alignment
-// issues with S16 coefs. Consider this later.
 
 // Macros to save a mono/stereo accumulator sample in q0 (and q4) as stereo out.
 #define ASSEMBLY_ACCUMULATE_MONO \
@@ -635,513 +631,6 @@
     );
 }
 
-template <>
-inline void ProcessL<1, 8>(int32_t* const out,
-        int count,
-        const int16_t* coefsP,
-        const int16_t* coefsN,
-        const int16_t* sP,
-        const int16_t* sN,
-        const int32_t* const volumeLR)
-{
-    const int CHANNELS = 1; // template specialization does not preserve params
-    const int STRIDE = 8;
-    sP -= CHANNELS*((STRIDE>>1)-1);
-    asm (
-        "veor           q0, q0, q0               \n"// (0 - combines+) accumulator = 0
-
-        "1:                                      \n"
-
-        "vld1.16        {d4}, [%[sP]]            \n"// (2+0d) load 4 16-bits mono samples
-        "vld1.16        {d6}, [%[sN]]!           \n"// (2) load 4 16-bits mono samples
-        "vld1.16        {d16}, [%[coefsP0]:64]!  \n"// (1) load 4 16-bits coefs
-        "vld1.16        {d20}, [%[coefsN0]:64]!  \n"// (1) load 4 16-bits coefs
-
-        "vrev64.16      d4, d4                   \n"// (1) reversed s3, s2, s1, s0, s7, s6, s5, s4
-
-        // reordering the vmal to do d6, d7 before d4, d5 is slower(?)
-        "vmlal.s16      q0, d4, d16              \n"// (1) multiply (reversed)samples by coef
-        "vmlal.s16      q0, d6, d20              \n"// (1) multiply neg samples
-
-        // moving these ARM instructions before neon above seems to be slower
-        "subs           %[count], %[count], #4   \n"// (1) update loop counter
-        "sub            %[sP], %[sP], #8         \n"// (0) move pointer to next set of samples
-
-        // sP used after branch (warning)
-        "bne            1b                       \n"// loop
-
-        ASSEMBLY_ACCUMULATE_MONO
-
-        : [out]     "=Uv" (out[0]),
-          [count]   "+r" (count),
-          [coefsP0] "+r" (coefsP),
-          [coefsN0] "+r" (coefsN),
-          [sP]      "+r" (sP),
-          [sN]      "+r" (sN)
-        : [vLR]     "r" (volumeLR)
-        : "cc", "memory",
-          "q0", "q1", "q2", "q3",
-          "q8", "q10"
-    );
-}
-
-template <>
-inline void ProcessL<2, 8>(int32_t* const out,
-        int count,
-        const int16_t* coefsP,
-        const int16_t* coefsN,
-        const int16_t* sP,
-        const int16_t* sN,
-        const int32_t* const volumeLR)
-{
-    const int CHANNELS = 2; // template specialization does not preserve params
-    const int STRIDE = 8;
-    sP -= CHANNELS*((STRIDE>>1)-1);
-    asm (
-        "veor           q0, q0, q0               \n"// (1) acc_L = 0
-        "veor           q4, q4, q4               \n"// (0 combines+) acc_R = 0
-
-        "1:                                      \n"
-
-        "vld2.16        {d4, d5}, [%[sP]]        \n"// (2+0d) load 8 16-bits stereo samples
-        "vld2.16        {d6, d7}, [%[sN]]!       \n"// (2) load 8 16-bits stereo samples
-        "vld1.16        {d16}, [%[coefsP0]:64]!  \n"// (1) load 8 16-bits coefs
-        "vld1.16        {d20}, [%[coefsN0]:64]!  \n"// (1) load 8 16-bits coefs
-
-        "vrev64.16      q2, q2                   \n"// (1) reverse 8 frames of the left positive
-
-        "vmlal.s16      q0, d4, d16              \n"// (1) multiply (reversed) samples left
-        "vmlal.s16      q4, d5, d16              \n"// (1) multiply (reversed) samples right
-        "vmlal.s16      q0, d6, d20              \n"// (1) multiply samples left
-        "vmlal.s16      q4, d7, d20              \n"// (1) multiply samples right
-
-        // moving these ARM before neon seems to be slower
-        "subs           %[count], %[count], #4   \n"// (1) update loop counter
-        "sub            %[sP], %[sP], #16        \n"// (0) move pointer to next set of samples
-
-        // sP used after branch (warning)
-        "bne            1b                       \n"// loop
-
-        ASSEMBLY_ACCUMULATE_STEREO
-
-        : [out] "=Uv" (out[0]),
-          [count] "+r" (count),
-          [coefsP0] "+r" (coefsP),
-          [coefsN0] "+r" (coefsN),
-          [sP] "+r" (sP),
-          [sN] "+r" (sN)
-        : [vLR] "r" (volumeLR)
-        : "cc", "memory",
-          "q0", "q1", "q2", "q3",
-          "q4", "q5", "q6",
-          "q8", "q10"
-     );
-}
-
-template <>
-inline void Process<1, 8>(int32_t* const out,
-        int count,
-        const int16_t* coefsP,
-        const int16_t* coefsN,
-        const int16_t* coefsP1,
-        const int16_t* coefsN1,
-        const int16_t* sP,
-        const int16_t* sN,
-        uint32_t lerpP,
-        const int32_t* const volumeLR)
-{
-    const int CHANNELS = 1; // template specialization does not preserve params
-    const int STRIDE = 8;
-    sP -= CHANNELS*((STRIDE>>1)-1);
-    asm (
-        "vmov.32        d2[0], %[lerpP]          \n"// load the positive phase S32 Q15
-        "veor           q0, q0, q0               \n"// (0 - combines+) accumulator = 0
-
-        "1:                                      \n"
-
-        "vld1.16        {d4}, [%[sP]]            \n"// (2+0d) load 4 16-bits mono samples
-        "vld1.16        {d6}, [%[sN]]!           \n"// (2) load 4 16-bits mono samples
-        "vld1.16        {d16}, [%[coefsP0]:64]!  \n"// (1) load 4 16-bits coefs
-        "vld1.16        {d17}, [%[coefsP1]:64]!  \n"// (1) load 4 16-bits coefs for interpolation
-        "vld1.16        {d20}, [%[coefsN1]:64]!  \n"// (1) load 4 16-bits coefs
-        "vld1.16        {d21}, [%[coefsN0]:64]!  \n"// (1) load 4 16-bits coefs for interpolation
-
-        "vsub.s16       d17, d17, d16            \n"// (1) interpolate (step1) 1st set of coefs
-        "vsub.s16       d21, d21, d20            \n"// (1) interpolate (step1) 2nd set of coets
-
-        "vqrdmulh.s16   d17, d17, d2[0]          \n"// (2) interpolate (step2) 1st set of coefs
-        "vqrdmulh.s16   d21, d21, d2[0]          \n"// (2) interpolate (step2) 2nd set of coefs
-
-        "vrev64.16      d4, d4                   \n"// (1) reverse s3, s2, s1, s0, s7, s6, s5, s4
-
-        "vadd.s16       d16, d16, d17            \n"// (1+2d) interpolate (step3) 1st set
-        "vadd.s16       d20, d20, d21            \n"// (1+1d) interpolate (step3) 2nd set
-
-        // reordering the vmal to do d6, d7 before d4, d5 is slower(?)
-        "vmlal.s16      q0, d4, d16              \n"// (1+0d) multiply (reversed)by coef
-        "vmlal.s16      q0, d6, d20              \n"// (1) multiply neg samples
-
-        // moving these ARM instructions before neon above seems to be slower
-        "subs           %[count], %[count], #4   \n"// (1) update loop counter
-        "sub            %[sP], %[sP], #8        \n"// move pointer to next set of samples
-
-        // sP used after branch (warning)
-        "bne            1b                       \n"// loop
-
-        ASSEMBLY_ACCUMULATE_MONO
-
-        : [out]     "=Uv" (out[0]),
-          [count]   "+r" (count),
-          [coefsP0] "+r" (coefsP),
-          [coefsN0] "+r" (coefsN),
-          [coefsP1] "+r" (coefsP1),
-          [coefsN1] "+r" (coefsN1),
-          [sP]      "+r" (sP),
-          [sN]      "+r" (sN)
-        : [lerpP]   "r" (lerpP),
-          [vLR]     "r" (volumeLR)
-        : "cc", "memory",
-          "q0", "q1", "q2", "q3",
-          "q8", "q9", "q10", "q11"
-    );
-}
-
-template <>
-inline void Process<2, 8>(int32_t* const out,
-        int count,
-        const int16_t* coefsP,
-        const int16_t* coefsN,
-        const int16_t* coefsP1,
-        const int16_t* coefsN1,
-        const int16_t* sP,
-        const int16_t* sN,
-        uint32_t lerpP,
-        const int32_t* const volumeLR)
-{
-    const int CHANNELS = 2; // template specialization does not preserve params
-    const int STRIDE = 8;
-    sP -= CHANNELS*((STRIDE>>1)-1);
-    asm (
-        "vmov.32        d2[0], %[lerpP]          \n"// load the positive phase
-        "veor           q0, q0, q0               \n"// (1) acc_L = 0
-        "veor           q4, q4, q4               \n"// (0 combines+) acc_R = 0
-
-        "1:                                      \n"
-
-        "vld2.16        {d4, d5}, [%[sP]]        \n"// (3+0d) load 8 16-bits stereo samples
-        "vld2.16        {d6, d7}, [%[sN]]!       \n"// (3) load 8 16-bits stereo samples
-        "vld1.16        {d16}, [%[coefsP0]:64]!  \n"// (1) load 8 16-bits coefs
-        "vld1.16        {d17}, [%[coefsP1]:64]!  \n"// (1) load 8 16-bits coefs for interpolation
-        "vld1.16        {d20}, [%[coefsN1]:64]!  \n"// (1) load 8 16-bits coefs
-        "vld1.16        {d21}, [%[coefsN0]:64]!  \n"// (1) load 8 16-bits coefs for interpolation
-
-        "vsub.s16       d17, d17, d16            \n"// (1) interpolate (step1) 1st set of coefs
-        "vsub.s16       d21, d21, d20            \n"// (1) interpolate (step1) 2nd set of coets
-
-        "vqrdmulh.s16   d17, d17, d2[0]          \n"// (2) interpolate (step2) 1st set of coefs
-        "vqrdmulh.s16   d21, d21, d2[0]          \n"// (2) interpolate (step2) 2nd set of coefs
-
-        "vrev64.16      q2, q2                   \n"// (1) reverse 8 frames of the left positive
-
-        "vadd.s16       d16, d16, d17            \n"// (1+1d) interpolate (step3) 1st set
-        "vadd.s16       d20, d20, d21            \n"// (1+1d) interpolate (step3) 2nd set
-
-        "vmlal.s16      q0, d4, d16              \n"// (1) multiply (reversed) samples left
-        "vmlal.s16      q4, d5, d16              \n"// (1) multiply (reversed) samples right
-        "vmlal.s16      q0, d6, d20              \n"// (1) multiply samples left
-        "vmlal.s16      q4, d7, d20              \n"// (1) multiply samples right
-
-        // moving these ARM before neon seems to be slower
-        "subs           %[count], %[count], #4   \n"// (1) update loop counter
-        "sub            %[sP], %[sP], #16        \n"// move pointer to next set of samples
-
-        // sP used after branch (warning)
-        "bne            1b                       \n"// loop
-
-        ASSEMBLY_ACCUMULATE_STEREO
-
-        : [out] "=Uv" (out[0]),
-          [count] "+r" (count),
-          [coefsP0] "+r" (coefsP),
-          [coefsN0] "+r" (coefsN),
-          [coefsP1] "+r" (coefsP1),
-          [coefsN1] "+r" (coefsN1),
-          [sP] "+r" (sP),
-          [sN] "+r" (sN)
-        : [lerpP]   "r" (lerpP),
-          [vLR] "r" (volumeLR)
-        : "cc", "memory",
-          "q0", "q1", "q2", "q3",
-          "q4", "q5", "q6",
-          "q8", "q9", "q10", "q11"
-    );
-}
-
-template <>
-inline void ProcessL<1, 8>(int32_t* const out,
-        int count,
-        const int32_t* coefsP,
-        const int32_t* coefsN,
-        const int16_t* sP,
-        const int16_t* sN,
-        const int32_t* const volumeLR)
-{
-    const int CHANNELS = 1; // template specialization does not preserve params
-    const int STRIDE = 8;
-    sP -= CHANNELS*((STRIDE>>1)-1);
-    asm (
-        "veor           q0, q0, q0               \n"// result, initialize to 0
-
-        "1:                                      \n"
-
-        "vld1.16        {d4}, [%[sP]]            \n"// load 4 16-bits mono samples
-        "vld1.16        {d6}, [%[sN]]!           \n"// load 4 16-bits mono samples
-        "vld1.32        {q8}, [%[coefsP0]:128]!  \n"// load 4 32-bits coefs
-        "vld1.32        {q10}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs
-
-        "vrev64.16      d4, d4                   \n"// reverse 2 frames of the positive side
-
-        "vshll.s16      q12, d4, #15             \n"// (stall) extend samples to 31 bits
-        "vshll.s16      q14, d6, #15             \n"// extend samples to 31 bits
-
-        "vqrdmulh.s32   q12, q12, q8             \n"// multiply samples by interpolated coef
-        "vqrdmulh.s32   q14, q14, q10            \n"// multiply samples by interpolated coef
-
-        "vadd.s32       q0, q0, q12              \n"// accumulate result
-        "vadd.s32       q0, q0, q14              \n"// (stall) accumulate result
-
-        "subs           %[count], %[count], #4   \n"// update loop counter
-        "sub            %[sP], %[sP], #8         \n"// move pointer to next set of samples
-
-        "bne            1b                       \n"// loop
-
-        ASSEMBLY_ACCUMULATE_MONO
-
-        : [out] "=Uv" (out[0]),
-          [count] "+r" (count),
-          [coefsP0] "+r" (coefsP),
-          [coefsN0] "+r" (coefsN),
-          [sP] "+r" (sP),
-          [sN] "+r" (sN)
-        : [vLR] "r" (volumeLR)
-        : "cc", "memory",
-          "q0", "q1", "q2", "q3",
-          "q8", "q9", "q10", "q11",
-          "q12", "q14"
-    );
-}
-
-template <>
-inline void ProcessL<2, 8>(int32_t* const out,
-        int count,
-        const int32_t* coefsP,
-        const int32_t* coefsN,
-        const int16_t* sP,
-        const int16_t* sN,
-        const int32_t* const volumeLR)
-{
-    const int CHANNELS = 2; // template specialization does not preserve params
-    const int STRIDE = 8;
-    sP -= CHANNELS*((STRIDE>>1)-1);
-    asm (
-        "veor           q0, q0, q0               \n"// result, initialize to 0
-        "veor           q4, q4, q4               \n"// result, initialize to 0
-
-        "1:                                      \n"
-
-        "vld2.16        {d4, d5}, [%[sP]]        \n"// load 4 16-bits stereo samples
-        "vld2.16        {d6, d7}, [%[sN]]!       \n"// load 4 16-bits stereo samples
-        "vld1.32        {q8}, [%[coefsP0]:128]!  \n"// load 4 32-bits coefs
-        "vld1.32        {q10}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs
-
-        "vrev64.16      q2, q2                   \n"// reverse 2 frames of the positive side
-
-        "vshll.s16      q12, d4, #15             \n"// extend samples to 31 bits
-        "vshll.s16      q13, d5, #15             \n"// extend samples to 31 bits
-
-        "vshll.s16      q14, d6, #15             \n"// extend samples to 31 bits
-        "vshll.s16      q15, d7, #15             \n"// extend samples to 31 bits
-
-        "vqrdmulh.s32   q12, q12, q8             \n"// multiply samples by coef
-        "vqrdmulh.s32   q13, q13, q8             \n"// multiply samples by coef
-        "vqrdmulh.s32   q14, q14, q10            \n"// multiply samples by coef
-        "vqrdmulh.s32   q15, q15, q10            \n"// multiply samples by coef
-
-        "vadd.s32       q0, q0, q12              \n"// accumulate result
-        "vadd.s32       q4, q4, q13              \n"// accumulate result
-        "vadd.s32       q0, q0, q14              \n"// accumulate result
-        "vadd.s32       q4, q4, q15              \n"// accumulate result
-
-        "subs           %[count], %[count], #4   \n"// update loop counter
-        "sub            %[sP], %[sP], #16        \n"// move pointer to next set of samples
-
-        "bne            1b                       \n"// loop
-
-        ASSEMBLY_ACCUMULATE_STEREO
-
-        : [out]     "=Uv" (out[0]),
-          [count]   "+r" (count),
-          [coefsP0] "+r" (coefsP),
-          [coefsN0] "+r" (coefsN),
-          [sP]      "+r" (sP),
-          [sN]      "+r" (sN)
-        : [vLR]     "r" (volumeLR)
-        : "cc", "memory",
-          "q0", "q1", "q2", "q3", "q4",
-          "q8", "q9", "q10", "q11",
-          "q12", "q13", "q14", "q15"
-    );
-}
-
-template <>
-inline void Process<1, 8>(int32_t* const out,
-        int count,
-        const int32_t* coefsP,
-        const int32_t* coefsN,
-        const int32_t* coefsP1,
-        const int32_t* coefsN1,
-        const int16_t* sP,
-        const int16_t* sN,
-        uint32_t lerpP,
-        const int32_t* const volumeLR)
-{
-    const int CHANNELS = 1; // template specialization does not preserve params
-    const int STRIDE = 8;
-    sP -= CHANNELS*((STRIDE>>1)-1);
-    asm (
-        "vmov.32        d2[0], %[lerpP]          \n"// load the positive phase
-        "veor           q0, q0, q0               \n"// result, initialize to 0
-
-        "1:                                      \n"
-
-        "vld1.16        {d4}, [%[sP]]            \n"// load 4 16-bits mono samples
-        "vld1.16        {d6}, [%[sN]]!           \n"// load 4 16-bits mono samples
-        "vld1.32        {q8}, [%[coefsP0]:128]!  \n"// load 4 32-bits coefs
-        "vld1.32        {q9}, [%[coefsP1]:128]!  \n"// load 4 32-bits coefs for interpolation
-        "vld1.32        {q10}, [%[coefsN1]:128]! \n"// load 4 32-bits coefs
-        "vld1.32        {q11}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs for interpolation
-
-        "vrev64.16      d4, d4                   \n"// reverse 2 frames of the positive side
-
-        "vsub.s32       q9, q9, q8               \n"// interpolate (step1) 1st set of coefs
-        "vsub.s32       q11, q11, q10            \n"// interpolate (step1) 2nd set of coets
-        "vshll.s16      q12, d4, #15             \n"// extend samples to 31 bits
-
-        "vqrdmulh.s32   q9, q9, d2[0]            \n"// interpolate (step2) 1st set of coefs
-        "vqrdmulh.s32   q11, q11, d2[0]          \n"// interpolate (step2) 2nd set of coefs
-        "vshll.s16      q14, d6, #15             \n"// extend samples to 31 bits
-
-        "vadd.s32       q8, q8, q9               \n"// interpolate (step3) 1st set
-        "vadd.s32       q10, q10, q11            \n"// interpolate (step4) 2nd set
-
-        "vqrdmulh.s32   q12, q12, q8             \n"// multiply samples by interpolated coef
-        "vqrdmulh.s32   q14, q14, q10            \n"// multiply samples by interpolated coef
-
-        "vadd.s32       q0, q0, q12              \n"// accumulate result
-        "vadd.s32       q0, q0, q14              \n"// accumulate result
-
-        "subs           %[count], %[count], #4   \n"// update loop counter
-        "sub            %[sP], %[sP], #8         \n"// move pointer to next set of samples
-
-        "bne            1b                       \n"// loop
-
-        ASSEMBLY_ACCUMULATE_MONO
-
-        : [out]     "=Uv" (out[0]),
-          [count]   "+r" (count),
-          [coefsP0] "+r" (coefsP),
-          [coefsP1] "+r" (coefsP1),
-          [coefsN0] "+r" (coefsN),
-          [coefsN1] "+r" (coefsN1),
-          [sP]      "+r" (sP),
-          [sN]      "+r" (sN)
-        : [lerpP]   "r" (lerpP),
-          [vLR]     "r" (volumeLR)
-        : "cc", "memory",
-          "q0", "q1", "q2", "q3",
-          "q8", "q9", "q10", "q11",
-          "q12", "q14"
-    );
-}
-
-template <>
-inline
-void Process<2, 8>(int32_t* const out,
-        int count,
-        const int32_t* coefsP,
-        const int32_t* coefsN,
-        const int32_t* coefsP1,
-        const int32_t* coefsN1,
-        const int16_t* sP,
-        const int16_t* sN,
-        uint32_t lerpP,
-        const int32_t* const volumeLR)
-{
-    const int CHANNELS = 2; // template specialization does not preserve params
-    const int STRIDE = 8;
-    sP -= CHANNELS*((STRIDE>>1)-1);
-    asm (
-        "vmov.32        d2[0], %[lerpP]          \n"// load the positive phase
-        "veor           q0, q0, q0               \n"// result, initialize to 0
-        "veor           q4, q4, q4               \n"// result, initialize to 0
-
-        "1:                                      \n"
-        "vld2.16        {d4, d5}, [%[sP]]        \n"// load 4 16-bits stereo samples
-        "vld2.16        {d6, d7}, [%[sN]]!       \n"// load 4 16-bits stereo samples
-        "vld1.32        {q8}, [%[coefsP0]:128]!  \n"// load 4 32-bits coefs
-        "vld1.32        {q9}, [%[coefsP1]:128]!  \n"// load 4 32-bits coefs for interpolation
-        "vld1.32        {q10}, [%[coefsN1]:128]! \n"// load 4 32-bits coefs
-        "vld1.32        {q11}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs for interpolation
-
-        "vrev64.16      q2, q2                   \n"// (reversed) 2 frames of the positive side
-
-        "vsub.s32       q9, q9, q8               \n"// interpolate (step1) 1st set of coefs
-        "vsub.s32       q11, q11, q10            \n"// interpolate (step1) 2nd set of coets
-        "vshll.s16      q12, d4, #15             \n"// extend samples to 31 bits
-        "vshll.s16      q13, d5, #15             \n"// extend samples to 31 bits
-
-        "vqrdmulh.s32   q9, q9, d2[0]            \n"// interpolate (step2) 1st set of coefs
-        "vqrdmulh.s32   q11, q11, d2[1]          \n"// interpolate (step3) 2nd set of coefs
-        "vshll.s16      q14, d6, #15             \n"// extend samples to 31 bits
-        "vshll.s16      q15, d7, #15             \n"// extend samples to 31 bits
-
-        "vadd.s32       q8, q8, q9               \n"// interpolate (step3) 1st set
-        "vadd.s32       q10, q10, q11            \n"// interpolate (step4) 2nd set
-
-        "vqrdmulh.s32   q12, q12, q8             \n"// multiply samples by interpolated coef
-        "vqrdmulh.s32   q13, q13, q8             \n"// multiply samples by interpolated coef
-        "vqrdmulh.s32   q14, q14, q10            \n"// multiply samples by interpolated coef
-        "vqrdmulh.s32   q15, q15, q10            \n"// multiply samples by interpolated coef
-
-        "vadd.s32       q0, q0, q12              \n"// accumulate result
-        "vadd.s32       q4, q4, q13              \n"// accumulate result
-        "vadd.s32       q0, q0, q14              \n"// accumulate result
-        "vadd.s32       q4, q4, q15              \n"// accumulate result
-
-        "subs           %[count], %[count], #4   \n"// update loop counter
-        "sub            %[sP], %[sP], #16        \n"// move pointer to next set of samples
-
-        "bne            1b                       \n"// loop
-
-        ASSEMBLY_ACCUMULATE_STEREO
-
-        : [out]     "=Uv" (out[0]),
-          [count]   "+r" (count),
-          [coefsP0] "+r" (coefsP),
-          [coefsP1] "+r" (coefsP1),
-          [coefsN0] "+r" (coefsN),
-          [coefsN1] "+r" (coefsN1),
-          [sP]      "+r" (sP),
-          [sN]      "+r" (sN)
-        : [lerpP]   "r" (lerpP),
-          [vLR]     "r" (volumeLR)
-        : "cc", "memory",
-          "q0", "q1", "q2", "q3", "q4",
-          "q8", "q9", "q10", "q11",
-          "q12", "q13", "q14", "q15"
-    );
-}
-
 #endif //USE_NEON
 
 }; // namespace android
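For readers tracking what the deleted NEON specializations above compute: each iteration accumulates four FIR taps per side, reading the positive-half samples backwards against coefsP and the negative-half samples forwards against coefsN. A rough scalar sketch of the mono ProcessL<1, 8> loop follows (illustration only, assuming the conventional meaning of the arguments; it is not the code that replaces the assembly):

    #include <cstdint>

    // Scalar illustration of the removed ProcessL<1, 8> NEON loop. The volume
    // scaling and the final store done by ASSEMBLY_ACCUMULATE_MONO are omitted;
    // this only shows the accumulation performed by the vmlal.s16 pair.
    static inline int32_t ProcessLMonoScalar(int count,
            const int16_t* coefsP, const int16_t* coefsN,
            const int16_t* sP, const int16_t* sN)
    {
        const int CHANNELS = 1;
        const int STRIDE = 8;
        sP -= CHANNELS * ((STRIDE >> 1) - 1);  // same pointer adjustment as the asm version
        int32_t accum = 0;
        while (count > 0) {
            for (int j = 0; j < 4; ++j) {
                accum += int32_t(sP[3 - j]) * coefsP[j]; // positive side, reversed (vrev64.16)
                accum += int32_t(sN[j]) * coefsN[j];     // negative side, forward
            }
            coefsP += 4;
            coefsN += 4;
            sP -= 4;                           // "sub %[sP], %[sP], #8": 4 x 16-bit samples
            sN += 4;
            count -= 4;
        }
        return accum; // ASSEMBLY_ACCUMULATE_MONO then applies volumeLR and adds into out[]
    }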
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 78cec31..fa0beaa 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -1722,28 +1722,7 @@
     uint32_t waitTimeLeftMs = mSourceThread->waitTimeMs();
 
     if (!mActive && frames != 0) {
-        start();
-        sp<ThreadBase> thread = mThread.promote();
-        if (thread != 0) {
-            MixerThread *mixerThread = (MixerThread *)thread.get();
-            if (mFrameCount > frames) {
-                // For the first write after being inactive, ensure that we have
-                // enough frames to fill mFrameCount (which should be multiples of
-                // the minimum buffer requirements of the downstream MixerThread).
-                // This provides enough frames for the downstream mixer to begin
-                // (see AudioFlinger::PlaybackThread::Track::isReady()).
-                if (mBufferQueue.size() < kMaxOverFlowBuffers) {
-                    uint32_t startFrames = (mFrameCount - frames);
-                    pInBuffer = new Buffer;
-                    pInBuffer->mBuffer = calloc(1, startFrames * mFrameSize);
-                    pInBuffer->frameCount = startFrames;
-                    pInBuffer->raw = pInBuffer->mBuffer;
-                    mBufferQueue.add(pInBuffer);
-                } else {
-                    ALOGW("OutputTrack::write() %p no more buffers in queue", this);
-                }
-            }
-        }
+        (void) start();
     }
 
     while (waitTimeLeftMs) {
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index de9551d..5d6423a 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -23,6 +23,7 @@
 LOCAL_SRC_FILES:=               \
     CameraService.cpp \
     CameraDeviceFactory.cpp \
+    CameraFlashlight.cpp \
     common/Camera2ClientBase.cpp \
     common/CameraDeviceBase.cpp \
     common/CameraModule.cpp \
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
new file mode 100644
index 0000000..00a70eb
--- /dev/null
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -0,0 +1,520 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraFlashlight"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include <cutils/properties.h>
+
+#include "camera/CameraMetadata.h"
+#include "CameraFlashlight.h"
+#include "gui/IGraphicBufferConsumer.h"
+#include "gui/BufferQueue.h"
+#include "camera/camera2/CaptureRequest.h"
+#include "CameraDeviceFactory.h"
+
+
+namespace android {
+
+CameraFlashlight::CameraFlashlight(CameraModule& cameraModule,
+        const camera_module_callbacks_t& callbacks) :
+        mCameraModule(&cameraModule),
+        mCallbacks(&callbacks) {
+}
+
+CameraFlashlight::~CameraFlashlight() {
+}
+
+status_t CameraFlashlight::createFlashlightControl(const String16& cameraId) {
+    ALOGV("%s: creating a flash light control for camera %s", __FUNCTION__,
+            cameraId.string());
+    if (mFlashControl != NULL) {
+        return INVALID_OPERATION;
+    }
+
+    status_t res = OK;
+
+    if (mCameraModule->getRawModule()->module_api_version >=
+            CAMERA_MODULE_API_VERSION_2_4) {
+        mFlashControl = new FlashControl(*mCameraModule, *mCallbacks);
+        if (mFlashControl == NULL) {
+            ALOGV("%s: cannot create flash control for module api v2.4+",
+                     __FUNCTION__);
+            return NO_MEMORY;
+        }
+    } else {
+        uint32_t deviceVersion = CAMERA_DEVICE_API_VERSION_1_0;
+
+        if (mCameraModule->getRawModule()->module_api_version >=
+                CAMERA_MODULE_API_VERSION_2_0) {
+            camera_info info;
+            res = mCameraModule->getCameraInfo(
+                    atoi(String8(cameraId).string()), &info);
+            if (res) {
+                ALOGV("%s: failed to get camera info for camera %s",
+                        __FUNCTION__, cameraId.string());
+                return res;
+            }
+            deviceVersion = info.device_version;
+        }
+
+        if (deviceVersion >= CAMERA_DEVICE_API_VERSION_2_0) {
+            CameraDeviceClientFlashControl *flashControl =
+                    new CameraDeviceClientFlashControl(*mCameraModule,
+                                                       *mCallbacks);
+            if (!flashControl) {
+                return NO_MEMORY;
+            }
+
+            mFlashControl = flashControl;
+        }
+        else {
+            // todo: implement for device api 1
+            return INVALID_OPERATION;
+        }
+    }
+
+    return OK;
+}
+
+status_t CameraFlashlight::setTorchMode(const String16& cameraId, bool enabled) {
+    if (!mCameraModule) {
+        return NO_INIT;
+    }
+
+    ALOGV("%s: set torch mode of camera %s to %d", __FUNCTION__,
+            cameraId.string(), enabled);
+
+    status_t res = OK;
+    Mutex::Autolock l(mLock);
+
+    if (mFlashControl == NULL) {
+        res = createFlashlightControl(cameraId);
+        if (res) {
+            return res;
+        }
+        res = mFlashControl->setTorchMode(cameraId, enabled);
+        return res;
+    }
+
+    // if flash control already exists, turning on torch mode may fail if it's
+    // tied to another camera device for module v2.3 and below.
+    res = mFlashControl->setTorchMode(cameraId, enabled);
+    if (res == BAD_INDEX) {
+        // flash control is tied to another camera device, need to close it and
+        // try again.
+        mFlashControl.clear();
+        res = createFlashlightControl(cameraId);
+        if (res) {
+            return res;
+        }
+        res = mFlashControl->setTorchMode(cameraId, enabled);
+    }
+
+    return res;
+}
+
+bool CameraFlashlight::hasFlashUnit(const String16& cameraId) {
+    status_t res;
+
+    Mutex::Autolock l(mLock);
+
+    if (mFlashControl == NULL) {
+        res = createFlashlightControl(cameraId);
+        if (res) {
+            ALOGE("%s: failed to create flash control for %s ",
+                    __FUNCTION__, cameraId.string());
+            return false;
+        }
+    }
+
+    bool flashUnit = false;
+
+    // if flash control already exists, querying if a camera device has a flash
+    // unit may fail if it's module v1
+    res = mFlashControl->hasFlashUnit(cameraId, &flashUnit);
+    if (res == BAD_INDEX) {
+        // need to close the flash control before query.
+        mFlashControl.clear();
+        res = createFlashlightControl(cameraId);
+        if (res) {
+            ALOGE("%s: failed to create flash control for %s ", __FUNCTION__,
+                    cameraId.string());
+            return false;
+        }
+        res = mFlashControl->hasFlashUnit(cameraId, &flashUnit);
+        if (res) {
+            flashUnit = false;
+        }
+    }
+
+    return flashUnit;
+}
+
+status_t CameraFlashlight::prepareDeviceOpen() {
+    ALOGV("%s: prepare for device open", __FUNCTION__);
+
+    Mutex::Autolock l(mLock);
+
+    if (mCameraModule && mCameraModule->getRawModule()->module_api_version <
+            CAMERA_MODULE_API_VERSION_2_4) {
+        // The framework is about to open a camera device; all flashlight
+        // controls should be closed for backward compatibility.
+        if (mFlashControl != NULL) {
+            mFlashControl.clear();
+        }
+    }
+
+    return OK;
+}
+
+
+FlashControlBase::~FlashControlBase() {
+}
+
+
+FlashControl::FlashControl(CameraModule& cameraModule,
+        const camera_module_callbacks_t& callbacks) :
+    mCameraModule(&cameraModule) {
+}
+
+FlashControl::~FlashControl() {
+}
+
+status_t FlashControl::hasFlashUnit(const String16& cameraId, bool *hasFlash) {
+    if (!hasFlash) {
+        return BAD_VALUE;
+    }
+
+    *hasFlash = false;
+
+    Mutex::Autolock l(mLock);
+
+    if (!mCameraModule) {
+        return NO_INIT;
+    }
+
+    camera_info info;
+    status_t res = mCameraModule->getCameraInfo(atoi(String8(cameraId).string()),
+            &info);
+    if (res != 0) {
+        return res;
+    }
+
+    CameraMetadata metadata;
+    metadata = info.static_camera_characteristics;
+    camera_metadata_entry flashAvailable =
+            metadata.find(ANDROID_FLASH_INFO_AVAILABLE);
+    if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) {
+        *hasFlash = true;
+    }
+
+    return OK;
+}
+
+status_t FlashControl::setTorchMode(const String16& cameraId, bool enabled) {
+    ALOGV("%s: set camera %s torch mode to %d", __FUNCTION__,
+            cameraId.string(), enabled);
+
+    Mutex::Autolock l(mLock);
+    if (!mCameraModule) {
+        return NO_INIT;
+    }
+
+    return mCameraModule->setTorchMode(String8(cameraId).string(), enabled);
+}
+
+CameraDeviceClientFlashControl::CameraDeviceClientFlashControl(
+        CameraModule& cameraModule,
+        const camera_module_callbacks_t& callbacks) :
+        mCameraModule(&cameraModule),
+        mCallbacks(&callbacks),
+        mTorchEnabled(false),
+        mMetadata(NULL) {
+}
+
+CameraDeviceClientFlashControl::~CameraDeviceClientFlashControl() {
+    if (mDevice != NULL) {
+        mDevice->flush();
+        mDevice->deleteStream(mStreamId);
+        mDevice.clear();
+    }
+    if (mMetadata) {
+        delete mMetadata;
+    }
+
+    mAnw.clear();
+    mSurfaceTexture.clear();
+    mProducer.clear();
+    mConsumer.clear();
+
+    if (mTorchEnabled) {
+        if (mCallbacks) {
+            ALOGV("%s: notify the framework that torch was turned off",
+                    __FUNCTION__);
+            mCallbacks->torch_mode_status_change(mCallbacks,
+                    String8(mCameraId).string(), TORCH_MODE_STATUS_OFF);
+        }
+    }
+}
+
+status_t CameraDeviceClientFlashControl::initializeSurface(int32_t width,
+        int32_t height) {
+    status_t res;
+    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+
+    mSurfaceTexture = new GLConsumer(mConsumer, 0, GLConsumer::TEXTURE_EXTERNAL,
+            true, true);
+    if (mSurfaceTexture == NULL) {
+        return NO_MEMORY;
+    }
+
+    int32_t format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    res = mSurfaceTexture->setDefaultBufferSize(width, height);
+    if (res) {
+        return res;
+    }
+    res = mSurfaceTexture->setDefaultBufferFormat(format);
+    if (res) {
+        return res;
+    }
+
+    bool useAsync = false;
+    int32_t consumerUsage;
+    res = mProducer->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &consumerUsage);
+    if (res) {
+        return res;
+    }
+
+    if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
+        useAsync = true;
+    }
+
+    mAnw = new Surface(mProducer, useAsync);
+    if (mAnw == NULL) {
+        return NO_MEMORY;
+    }
+    res = mDevice->createStream(mAnw, width, height, format, &mStreamId);
+    if (res) {
+        return res;
+    }
+
+    res = mDevice->configureStreams();
+    if (res) {
+        return res;
+    }
+
+    return res;
+}
+
+status_t CameraDeviceClientFlashControl::getSmallestSurfaceSize(
+        const camera_info& info, int32_t *width, int32_t *height) {
+    if (!width || !height) {
+        return BAD_VALUE;
+    }
+
+    int32_t w = INT32_MAX;
+    int32_t h = 1;
+
+    CameraMetadata metadata;
+    metadata = info.static_camera_characteristics;
+    camera_metadata_entry streamConfigs =
+            metadata.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+    for (size_t i = 0; i < streamConfigs.count; i += 4) {
+        int32_t fmt = streamConfigs.data.i32[i];
+        if (fmt == ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED) {
+            int32_t ww = streamConfigs.data.i32[i + 1];
+            int32_t hh = streamConfigs.data.i32[i + 2];
+
+            if (w * h > ww * hh) {
+                w = ww;
+                h = hh;
+            }
+        }
+    }
+
+    if (w == INT32_MAX) {
+        return NAME_NOT_FOUND;
+    }
+
+    *width = w;
+    *height = h;
+
+    return OK;
+}
+
+status_t CameraDeviceClientFlashControl::connectCameraDevice(
+        const String16& cameraId) {
+    String8 id = String8(cameraId);
+    camera_info info;
+    status_t res = mCameraModule->getCameraInfo(atoi(id.string()), &info);
+    if (res != 0) {
+        ALOGE("%s: failed to get camera info for camera %s", __FUNCTION__,
+                id.string());
+        return res;
+    }
+
+    mDevice = CameraDeviceFactory::createDevice(atoi(id.string()));
+    if (mDevice == NULL) {
+        return NO_MEMORY;
+    }
+
+    res = mDevice->initialize(mCameraModule);
+    if (res) {
+        goto fail;
+    }
+
+    int32_t width, height;
+    res = getSmallestSurfaceSize(info, &width, &height);
+    if (res) {
+        return res;
+    }
+    res = initializeSurface(width, height);
+    if (res) {
+        goto fail;
+    }
+
+    mCameraId = cameraId;
+
+    return OK;
+
+fail:
+    mDevice.clear();
+    return res;
+}
+
+
+status_t CameraDeviceClientFlashControl::hasFlashUnit(const String16& cameraId,
+        bool *hasFlash) {
+    ALOGV("%s: checking if camera %s has a flash unit", __FUNCTION__,
+            cameraId.string());
+
+    Mutex::Autolock l(mLock);
+    return hasFlashUnitLocked(cameraId, hasFlash);
+
+}
+
+status_t CameraDeviceClientFlashControl::hasFlashUnitLocked(
+        const String16& cameraId, bool *hasFlash) {
+    if (!mCameraModule) {
+        ALOGE("%s: camera module is NULL", __FUNCTION__);
+        return NO_INIT;
+    }
+
+    if (!hasFlash) {
+        return BAD_VALUE;
+    }
+
+    camera_info info;
+    status_t res = mCameraModule->getCameraInfo(
+            atoi(String8(cameraId).string()), &info);
+    if (res != 0) {
+        ALOGE("%s: failed to get camera info for camera %s", __FUNCTION__,
+                cameraId.string());
+        return res;
+    }
+
+    CameraMetadata metadata;
+    metadata = info.static_camera_characteristics;
+    camera_metadata_entry flashAvailable =
+            metadata.find(ANDROID_FLASH_INFO_AVAILABLE);
+    if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) {
+        *hasFlash = true;
+    }
+
+    return OK;
+}
+
+status_t CameraDeviceClientFlashControl::submitTorchRequest(bool enabled) {
+    status_t res;
+
+    if (mMetadata == NULL) {
+        mMetadata = new CameraMetadata();
+        if (mMetadata == NULL) {
+            return NO_MEMORY;
+        }
+        res = mDevice->createDefaultRequest(
+                CAMERA3_TEMPLATE_PREVIEW, mMetadata);
+        if (res) {
+            return res;
+        }
+    }
+
+    uint8_t torchOn = enabled ? ANDROID_FLASH_MODE_TORCH :
+                                ANDROID_FLASH_MODE_OFF;
+
+    mMetadata->update(ANDROID_FLASH_MODE, &torchOn, 1);
+    mMetadata->update(ANDROID_REQUEST_OUTPUT_STREAMS, &mStreamId, 1);
+
+    int32_t requestId = 0;
+    mMetadata->update(ANDROID_REQUEST_ID, &requestId, 1);
+
+    List<const CameraMetadata> metadataRequestList;
+    metadataRequestList.push_back(*mMetadata);
+
+    int64_t lastFrameNumber = 0;
+    res = mDevice->captureList(metadataRequestList, &lastFrameNumber);
+
+    return res;
+}
+
+
+status_t CameraDeviceClientFlashControl::setTorchMode(
+        const String16& cameraId, bool enabled) {
+    bool hasFlash = false;
+
+    Mutex::Autolock l(mLock);
+    status_t res = hasFlashUnitLocked(cameraId, &hasFlash);
+
+    // pre-check
+    if (enabled) {
+        // invalid camera?
+        if (res) {
+            return -EINVAL;
+        }
+        // no flash unit?
+        if (!hasFlash) {
+            return -ENOSYS;
+        }
+        // already opened for a different device?
+        if (mDevice != NULL && cameraId != mCameraId) {
+            return BAD_INDEX;
+        }
+    } else if (mDevice == NULL || cameraId != mCameraId) {
+        // disabling the torch mode of an un-opened or different device.
+        return OK;
+    }
+
+    if (mDevice == NULL) {
+        res = connectCameraDevice(cameraId);
+        if (res) {
+            return res;
+        }
+    }
+
+    res = submitTorchRequest(enabled);
+    if (res) {
+        return res;
+    }
+
+    mTorchEnabled = enabled;
+    return OK;
+}
+
+} // namespace android
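getSmallestSurfaceSize() above assumes the flat four-value layout of ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS entries, which is why it walks the array in steps of four. A hypothetical entry list (illustration only, not part of the change) shows what the loop selects; note that it compares areas only and never reads the fourth (direction) field:

    // Hypothetical characteristics data. Entries are groups of four int32
    // values: format, width, height, direction. With this data the loop in
    // getSmallestSurfaceSize() settles on 320x240, the smallest
    // IMPLEMENTATION_DEFINED entry by area; the BLOB entry is skipped.
    static const int32_t kExampleStreamConfigs[] = {
        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED, 1920, 1080, 0,
        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,  320,  240, 0,
        ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,                   4096, 3072, 0,
    };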
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
new file mode 100644
index 0000000..a0de0b0
--- /dev/null
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -0,0 +1,149 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERAFLASHLIGHT_H
+#define ANDROID_SERVERS_CAMERA_CAMERAFLASHLIGHT_H
+
+#include "hardware/camera_common.h"
+#include "utils/KeyedVector.h"
+#include "gui/GLConsumer.h"
+#include "gui/Surface.h"
+#include "common/CameraDeviceBase.h"
+
+namespace android {
+
+/**
+ * FlashControlBase is a base class for flash control. It defines the functions
+ * that a flash control for each camera module/device version should implement.
+ */
+class FlashControlBase : public virtual VirtualLightRefBase {
+    public:
+        virtual ~FlashControlBase();
+
+        // Whether a camera device has a flash unit. Calling this function may
+        // cause the torch mode to be turned off in HAL v1 devices. If
+        // previously-on torch mode is turned off,
+        // callbacks.torch_mode_status_change() should be invoked.
+        virtual status_t hasFlashUnit(const String16& cameraId,
+                    bool *hasFlash) = 0;
+
+        // set the torch mode to on or off.
+        virtual status_t setTorchMode(const String16& cameraId,
+                    bool enabled) = 0;
+};
+
+/**
+ * CameraFlashlight can be used by the camera service to control the flashlight.
+ */
+class CameraFlashlight : public virtual VirtualLightRefBase {
+    public:
+        CameraFlashlight(CameraModule& cameraModule,
+                const camera_module_callbacks_t& callbacks);
+        virtual ~CameraFlashlight();
+
+        // set the torch mode to on or off.
+        status_t setTorchMode(const String16& cameraId, bool enabled);
+
+        // Whether a camera device has a flash unit. Calling this function may
+        // cause the torch mode to be turned off in HAL v1 devices.
+        bool hasFlashUnit(const String16& cameraId);
+
+        // Notify CameraFlashlight that camera service is going to open a camera
+        // device. CameraFlashlight will free the resources that may cause the
+        // camera open to fail. Camera service must call this function before
+        // opening a camera device.
+        status_t prepareDeviceOpen();
+
+    private:
+        // create flashlight control based on camera module API and camera
+        // device API versions.
+        status_t createFlashlightControl(const String16& cameraId);
+
+        sp<FlashControlBase> mFlashControl;
+        CameraModule *mCameraModule;
+        const camera_module_callbacks_t *mCallbacks;
+
+        Mutex mLock;
+};
+
+/**
+ * Flash control for camera module v2.4 and above.
+ */
+class FlashControl : public FlashControlBase {
+    public:
+        FlashControl(CameraModule& cameraModule,
+                const camera_module_callbacks_t& callbacks);
+        virtual ~FlashControl();
+
+        // FlashControlBase
+        status_t hasFlashUnit(const String16& cameraId, bool *hasFlash);
+        status_t setTorchMode(const String16& cameraId, bool enabled);
+
+    private:
+        CameraModule *mCameraModule;
+
+        Mutex mLock;
+};
+
+/**
+ * Flash control for camera module <= v2.3 and camera HAL v2-v3
+ */
+class CameraDeviceClientFlashControl : public FlashControlBase {
+    public:
+        CameraDeviceClientFlashControl(CameraModule& cameraModule,
+                const camera_module_callbacks_t& callbacks);
+        virtual ~CameraDeviceClientFlashControl();
+
+        // FlashControlBase
+        status_t setTorchMode(const String16& cameraId, bool enabled);
+        status_t hasFlashUnit(const String16& cameraId, bool *hasFlash);
+
+    private:
+        // connect to a camera device
+        status_t connectCameraDevice(const String16& cameraId);
+
+        // initialize a surface
+        status_t initializeSurface(int32_t width, int32_t height);
+
+        // submit a request with the given torch mode
+        status_t submitTorchRequest(bool enabled);
+
+        // get the smallest surface size of IMPLEMENTATION_DEFINED
+        status_t getSmallestSurfaceSize(const camera_info& info, int32_t *width,
+                    int32_t *height);
+
+        status_t hasFlashUnitLocked(const String16& cameraId, bool *hasFlash);
+
+        CameraModule *mCameraModule;
+        const camera_module_callbacks_t *mCallbacks;
+        String16 mCameraId;
+        bool mTorchEnabled;
+        CameraMetadata *mMetadata;
+
+        sp<CameraDeviceBase> mDevice;
+
+        sp<IGraphicBufferProducer> mProducer;
+        sp<IGraphicBufferConsumer>  mConsumer;
+        sp<GLConsumer> mSurfaceTexture;
+        sp<ANativeWindow> mAnw;
+        int32_t mStreamId;
+
+        Mutex mLock;
+};
+
+} // namespace android
+
+#endif
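A hedged sketch of the call order the header above implies for a user of CameraFlashlight (the exampleTorchSequence helper is hypothetical and not part of this change):

    // Hypothetical caller following the contract documented in
    // CameraFlashlight.h: check for a flash unit, toggle the torch, and call
    // prepareDeviceOpen() before the service opens any camera device.
    status_t exampleTorchSequence(const sp<CameraFlashlight>& flashlight,
            const String16& cameraId) {
        if (!flashlight->hasFlashUnit(cameraId)) {
            return -ENOSYS;              // nothing to control on this camera
        }
        status_t res = flashlight->setTorchMode(cameraId, true);
        if (res != OK) {
            return res;                  // HAL rejected the request or device busy
        }
        res = flashlight->setTorchMode(cameraId, false);
        if (res != OK) {
            return res;
        }
        // must be called before the service opens a camera device, so that any
        // flash control still holding the device is released first
        return flashlight->prepareDeviceOpen();
    }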
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index c1b7806..d65ac21 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -87,6 +87,38 @@
         camera_id,
         new_status);
 }
+
+static void torch_mode_status_change(
+        const struct camera_module_callbacks* callbacks,
+        const char* camera_id,
+        int new_status) {
+    if (!callbacks || !camera_id) {
+        ALOGE("%s invalid parameters. callbacks %p, camera_id %p", __FUNCTION__,
+                callbacks, camera_id);
+        return;
+    }
+    sp<CameraService> cs = const_cast<CameraService*>(
+                                static_cast<const CameraService*>(callbacks));
+
+    ICameraServiceListener::TorchStatus status;
+    switch (new_status) {
+        case TORCH_MODE_STATUS_AVAILABLE:
+            status = ICameraServiceListener::TORCH_STATUS_AVAILABLE;
+            break;
+        case TORCH_MODE_STATUS_RESOURCE_BUSY:
+            status = ICameraServiceListener::TORCH_STATUS_NOT_AVAILABLE;
+            break;
+        case TORCH_MODE_STATUS_OFF:
+            status = ICameraServiceListener::TORCH_STATUS_OFF;
+            break;
+        default:
+            ALOGE("Unknown torch status %d", new_status);
+            return;
+    }
+
+    cs->onTorchStatusChanged(
+        String16(camera_id),
+        status);
+}
 } // extern "C"
 
 // ----------------------------------------------------------------------------
@@ -96,7 +128,7 @@
 static CameraService *gCameraService;
 
 CameraService::CameraService()
-    :mSoundRef(0), mModule(0)
+    :mSoundRef(0), mModule(0), mFlashlight(0)
 {
     ALOGI("CameraService started (pid=%d)", getpid());
     gCameraService = this;
@@ -106,6 +138,8 @@
     }
 
     this->camera_device_status_change = android::camera_device_status_change;
+    this->torch_mode_status_change = android::torch_mode_status_change;
+
 }
 
 void CameraService::onFirstRef()
@@ -122,6 +156,8 @@
     }
     else {
         mModule = new CameraModule(rawModule);
+        mFlashlight = new CameraFlashlight(*mModule, *this);
+
         const hw_module_t *common = mModule->getRawModule();
         ALOGI("Loaded \"%s\" camera module", common->name);
         mNumberOfCameras = mModule->getNumberOfCameras();
@@ -132,6 +168,12 @@
         }
         for (int i = 0; i < mNumberOfCameras; i++) {
             setCameraFree(i);
+
+            String16 cameraName = String16(String8::format("%d", i));
+            if (mFlashlight->hasFlashUnit(cameraName)) {
+                mTorchStatusMap.add(cameraName,
+                        ICameraServiceListener::TORCH_STATUS_AVAILABLE);
+            }
         }
 
         if (common->module_api_version >= CAMERA_MODULE_API_VERSION_2_1) {
@@ -226,6 +268,37 @@
 
 }
 
+void CameraService::onTorchStatusChanged(const String16& cameraId,
+        ICameraServiceListener::TorchStatus newStatus) {
+    Mutex::Autolock al(mTorchStatusMutex);
+    onTorchStatusChangedLocked(cameraId, newStatus);
+}
+
+void CameraService::onTorchStatusChangedLocked(const String16& cameraId,
+        ICameraServiceListener::TorchStatus newStatus) {
+    ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
+            __FUNCTION__, cameraId.string(), newStatus);
+
+    if (getTorchStatusLocked(cameraId) == newStatus) {
+        ALOGE("%s: Torch state transition to the same status 0x%x not allowed",
+              __FUNCTION__, (uint32_t)newStatus);
+        return;
+    }
+
+    status_t res = setTorchStatusLocked(cameraId, newStatus);
+    if (res) {
+        ALOGE("%s: Failed to set the torch status to %d", __FUNCTION__,
+                (uint32_t)newStatus);
+        return;
+    }
+
+    Vector<sp<ICameraServiceListener> >::const_iterator it;
+    for (it = mListenerList.begin(); it != mListenerList.end(); ++it) {
+        (*it)->onTorchStatusChanged(newStatus, cameraId);
+    }
+}
+
+
 int32_t CameraService::getNumberOfCameras() {
     return mNumberOfCameras;
 }
@@ -680,6 +753,9 @@
         int halVersion,
         bool legacyMode) {
 
+    // give flashlight a chance to close devices if necessary.
+    mFlashlight->prepareDeviceOpen();
+
     int facing = -1;
     int deviceVersion = getDeviceVersion(cameraId, &facing);
 
@@ -856,6 +932,47 @@
     return OK;
 }
 
+status_t CameraService::setTorchMode(const String16& cameraId, bool enabled,
+        const sp<IBinder>& clientBinder) {
+    if (enabled && clientBinder == NULL) {
+        ALOGE("%s: torch client binder is NULL", __FUNCTION__);
+        return -ENOSYS;
+    }
+
+    Mutex::Autolock al(mTorchStatusMutex);
+    status_t res = mFlashlight->setTorchMode(cameraId, enabled);
+    if (res) {
+        ALOGE("%s: setting torch mode of camera %s to %d failed", __FUNCTION__,
+                cameraId.string(), enabled);
+        return res;
+    }
+
+    // update the link to client's death
+    ssize_t index = mTorchClientMap.indexOfKey(cameraId);
+    if (enabled) {
+        if (index == NAME_NOT_FOUND) {
+            mTorchClientMap.add(cameraId, clientBinder);
+        } else {
+            const sp<IBinder> oldBinder = mTorchClientMap.valueAt(index);
+            oldBinder->unlinkToDeath(this);
+
+            mTorchClientMap.replaceValueAt(index, clientBinder);
+        }
+        clientBinder->linkToDeath(this);
+    } else if (index != NAME_NOT_FOUND) {
+        sp<IBinder> oldBinder = mTorchClientMap.valueAt(index);
+        oldBinder->unlinkToDeath(this);
+    }
+
+    // notify the listeners of the change.
+    ICameraServiceListener::TorchStatus status = enabled ?
+            ICameraServiceListener::TORCH_STATUS_ON :
+            ICameraServiceListener::TORCH_STATUS_OFF;
+    onTorchStatusChangedLocked(cameraId, status);
+
+    return OK;
+}
+
 status_t CameraService::connectFinishUnsafe(const sp<BasicClient>& client,
                                             const sp<IBinder>& remoteCallback) {
     status_t status = client->initialize(mModule);
@@ -981,6 +1098,9 @@
         int facing = -1;
         int deviceVersion = getDeviceVersion(cameraId, &facing);
 
+        // give flashlight a chance to close devices if necessary.
+        mFlashlight->prepareDeviceOpen();
+
         switch(deviceVersion) {
           case CAMERA_DEVICE_API_VERSION_1_0:
             ALOGW("Camera using old HAL version: %d", deviceVersion);
@@ -1052,6 +1172,16 @@
         }
     }
 
+    /* Immediately signal current torch status to this listener only */
+    {
+        Mutex::Autolock al(mTorchStatusMutex);
+        for (size_t i = 0; i < mTorchStatusMap.size(); i++) {
+            listener->onTorchStatusChanged(mTorchStatusMap.valueAt(i),
+                    mTorchStatusMap.keyAt(i));
+        }
+
+    }
+
     return OK;
 }
 status_t CameraService::removeListener(
@@ -1731,6 +1861,23 @@
     return NO_ERROR;
 }
 
+void CameraService::handleTorchClientBinderDied(const wp<IBinder> &who) {
+    Mutex::Autolock al(mTorchStatusMutex);
+    for (size_t i = 0; i < mTorchClientMap.size(); i++) {
+        if (mTorchClientMap[i] == who) {
+            // turn off the torch mode that was turned on by dead client
+            String16 cameraId = mTorchClientMap.keyAt(i);
+            mFlashlight->setTorchMode(cameraId, false);
+            mTorchClientMap.removeItemsAt(i);
+
+            // notify torch mode was turned off
+            onTorchStatusChangedLocked(cameraId,
+                    ICameraServiceListener::TORCH_STATUS_OFF);
+            break;
+        }
+    }
+}
+
 /*virtual*/void CameraService::binderDied(
     const wp<IBinder> &who) {
 
@@ -1741,6 +1888,10 @@
 
     ALOGV("java clients' binder died");
 
+    // check torch client
+    handleTorchClientBinderDied(who);
+
+    // check camera device client
     sp<BasicClient> cameraClient = getClientByRemote(who);
 
     if (cameraClient == 0) {
@@ -1834,4 +1985,27 @@
     return mStatusList[cameraId];
 }
 
+ICameraServiceListener::TorchStatus CameraService::getTorchStatusLocked(
+        const String16& cameraId) const {
+    ssize_t index = mTorchStatusMap.indexOfKey(cameraId);
+    if (index == NAME_NOT_FOUND) {
+        return ICameraServiceListener::TORCH_STATUS_NOT_AVAILABLE;
+    }
+
+    return mTorchStatusMap.valueAt(index);
+}
+
+status_t CameraService::setTorchStatusLocked(const String16& cameraId,
+        ICameraServiceListener::TorchStatus status) {
+    ssize_t index = mTorchStatusMap.indexOfKey(cameraId);
+    if (index == NAME_NOT_FOUND) {
+        return BAD_VALUE;
+    }
+    ICameraServiceListener::TorchStatus& item =
+            mTorchStatusMap.editValueAt(index);
+    item = status;
+
+    return OK;
+}
+
 }; // namespace android
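To picture how the new torch_mode_status_change() handler above is reached, a small hypothetical HAL-side helper (illustration only, not part of the patch) is enough: the module invokes the callback with one of the TORCH_MODE_STATUS_* constants, and CameraService maps it to a TorchStatus and fans it out to every registered listener via onTorchStatusChangedLocked().

    // Hypothetical HAL-side helper. The callbacks pointer is the CameraService
    // instance registered with the module; the status value is one of the
    // TORCH_MODE_STATUS_* constants handled in torch_mode_status_change().
    static void reportTorchBusy(const camera_module_callbacks_t* callbacks) {
        if (callbacks != NULL && callbacks->torch_mode_status_change != NULL) {
            callbacks->torch_mode_status_change(callbacks, "0",
                    TORCH_MODE_STATUS_RESOURCE_BUSY);
        }
    }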
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 7d0df3a..84bcdb8 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -36,6 +36,8 @@
 #include <camera/CameraParameters.h>
 
 #include <camera/ICameraServiceListener.h>
+#include "CameraFlashlight.h"
+
 
 #include "common/CameraModule.h"
 
@@ -70,6 +72,9 @@
     // HAL Callbacks
     virtual void        onDeviceStatusChanged(int cameraId,
                                               int newStatus);
+    virtual void        onTorchStatusChanged(const String16& cameraId,
+                                             ICameraServiceListener::TorchStatus
+                                                   newStatus);
 
     /////////////////////////////////////////////////////////////////////
     // ICameraService
@@ -112,6 +117,9 @@
             /*out*/
             String16* parameters);
 
+    virtual status_t    setTorchMode(const String16& cameraId, bool enabled,
+            const sp<IBinder>& clientBinder);
+
     // OK = supports api of that version, -EOPNOTSUPP = does not support
     virtual status_t    supportsCameraApi(
             int cameraId, int apiVersion);
@@ -408,6 +416,32 @@
                             int32_t cameraId,
                             const StatusVector *rejectSourceStates = NULL);
 
+    // flashlight control
+    sp<CameraFlashlight> mFlashlight;
+    // guard mTorchStatusMap and mTorchClientMap
+    Mutex                mTorchStatusMutex;
+    // camera id -> torch status
+    KeyedVector<String16, ICameraServiceListener::TorchStatus> mTorchStatusMap;
+    // camera id -> torch client binder
+    // only store the last client that turns on each camera's torch mode
+    KeyedVector<String16, sp<IBinder> > mTorchClientMap;
+
+    // check and handle if torch client's process has died
+    void handleTorchClientBinderDied(const wp<IBinder> &who);
+
+    // handle torch mode status change and invoke callbacks. mTorchStatusMutex
+    // should be locked.
+    void onTorchStatusChangedLocked(const String16& cameraId,
+            ICameraServiceListener::TorchStatus newStatus);
+
+    // get a camera's torch status. mTorchStatusMutex should be locked.
+    ICameraServiceListener::TorchStatus getTorchStatusLocked(
+            const String16 &cameraId) const;
+
+    // set a camera's torch status. mTorchStatusMutex should be locked.
+    status_t setTorchStatusLocked(const String16 &cameraId,
+            ICameraServiceListener::TorchStatus status);
+
     // IBinder::DeathRecipient implementation
     virtual void        binderDied(const wp<IBinder> &who);
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 4ac5166..5dbdeb2 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1959,7 +1959,7 @@
             return width * height * 2;
         case HAL_PIXEL_FORMAT_RGBA_8888:
             return width * height * 4;
-        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+        case HAL_PIXEL_FORMAT_RAW16:
             return width * height * 2;
         default:
             ALOGE("%s: Unknown preview format: %x",
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 4f4cfb0..87e0132 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -182,9 +182,9 @@
                 supportedPreviewFormats +=
                     CameraParameters::PIXEL_FORMAT_YUV420SP;
                 break;
-            // Not advertizing JPEG, RAW_SENSOR, etc, for preview formats
+            // Not advertising JPEG, RAW16, etc, for preview formats
             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
-            case HAL_PIXEL_FORMAT_RAW_SENSOR:
+            case HAL_PIXEL_FORMAT_RAW16:
             case HAL_PIXEL_FORMAT_BLOB:
                 addComma = false;
                 break;
@@ -2253,7 +2253,7 @@
         case HAL_PIXEL_FORMAT_RGBA_8888:   // RGBA8888
             fmt = CameraParameters::PIXEL_FORMAT_RGBA8888;
             break;
-        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+        case HAL_PIXEL_FORMAT_RAW16:
             ALOGW("Raw sensor preview format requested.");
             fmt = CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
             break;
diff --git a/services/camera/libcameraservice/common/CameraModule.cpp b/services/camera/libcameraservice/common/CameraModule.cpp
index bbf47e8..85a4df9 100644
--- a/services/camera/libcameraservice/common/CameraModule.cpp
+++ b/services/camera/libcameraservice/common/CameraModule.cpp
@@ -66,6 +66,12 @@
         return -EINVAL;
     }
 
+    // Only override static_camera_characteristics for API2 devices
+    int apiVersion = mModule->common.module_api_version;
+    if (apiVersion < CAMERA_MODULE_API_VERSION_2_0) {
+        return mModule->get_camera_info(cameraId, info);
+    }
+
     camera_info &wrappedInfo = mCameraInfo[cameraId];
     if (!mCameraInfoCached[cameraId]) {
         camera_info rawInfo;
@@ -75,14 +81,7 @@
         }
         CameraMetadata &m = mCameraCharacteristics[cameraId];
         m = rawInfo.static_camera_characteristics;
-        int deviceVersion;
-        int apiVersion = mModule->common.module_api_version;
-        if (apiVersion >= CAMERA_MODULE_API_VERSION_2_0) {
-            deviceVersion = rawInfo.device_version;
-        } else {
-            deviceVersion = CAMERA_DEVICE_API_VERSION_1_0;
-        }
-        deriveCameraCharacteristicsKeys(deviceVersion, m);
+        deriveCameraCharacteristicsKeys(rawInfo.device_version, m);
         wrappedInfo = rawInfo;
         wrappedInfo.static_camera_characteristics = m.getAndLock();
         mCameraInfoCached[cameraId] = true;