Merge "Revert "Workaround slow AudioTrack destruction"" into klp-dev
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 68289a5..49999b5 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -464,12 +464,16 @@
     err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
 
     if (err != NO_ERROR && !gSizeSpecified) {
-        if (gVideoWidth != kFallbackWidth && gVideoHeight != kFallbackHeight) {
+        // fallback is defined for landscape; swap if we're in portrait
+        bool needSwap = gVideoWidth < gVideoHeight;
+        uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
+        uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
+        if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
             ALOGV("Retrying with 720p");
-            fprintf(stderr, "WARNING: failed at %dx%d, retrying at 720p\n",
-                    gVideoWidth, gVideoHeight);
-            gVideoWidth = kFallbackWidth;
-            gVideoHeight = kFallbackHeight;
+            fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
+                    gVideoWidth, gVideoHeight, newWidth, newHeight);
+            gVideoWidth = newWidth;
+            gVideoHeight = newHeight;
             err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
         }
     }
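
The screenrecord hunk above makes the 720p retry orientation-aware: the kFallbackWidth/kFallbackHeight constants describe a landscape size, so they are swapped whenever the size that failed was portrait. A minimal standalone sketch of that selection (the 1280x720 constants and the function name here are illustrative, not taken from screenrecord.cpp):

    #include <cstdint>
    #include <cstdio>

    // Illustrative 720p fallback constants; the real values live in screenrecord.cpp.
    static const uint32_t kFallbackWidth = 1280;
    static const uint32_t kFallbackHeight = 720;

    // Pick the retry size: the fallback is defined for landscape, so swap the
    // two constants when the size that failed was portrait.
    static void pickFallbackSize(uint32_t failedW, uint32_t failedH,
                                 uint32_t* outW, uint32_t* outH) {
        bool portrait = failedW < failedH;
        *outW = portrait ? kFallbackHeight : kFallbackWidth;
        *outH = portrait ? kFallbackWidth : kFallbackHeight;
    }

    int main() {
        uint32_t w, h;
        pickFallbackSize(1080, 1920, &w, &h);    // a portrait request that failed
        printf("retrying at %ux%u\n", w, h);     // prints "retrying at 720x1280"
        return 0;
    }
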
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index 62f0c64..052064d 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -398,18 +398,20 @@
 
                 void        pause();    // suspend thread from execution at next loop boundary
                 void        resume();   // allow thread to execute, if not requested to exit
-                void        pauseConditional();
-                                        // like pause(), but only if prior resume() wasn't latched
 
     private:
+                void        pauseInternal(nsecs_t ns = 0LL);
+                                        // like pause(), but only used internally within thread
+
         friend class AudioRecord;
         virtual bool        threadLoop();
         AudioRecord&        mReceiver;
         virtual ~AudioRecordThread();
         Mutex               mMyLock;    // Thread::mLock is private
         Condition           mMyCond;    // Thread::mThreadExitedCondition is private
-        bool                mPaused;    // whether thread is currently paused
-        bool                mResumeLatch;   // whether next pauseConditional() will be a nop
+        bool                mPaused;    // whether thread is requested to pause at next loop entry
+        bool                mPausedInt; // whether thread internally requests pause
+        nsecs_t             mPausedNs;  // if mPausedInt then associated timeout, otherwise ignored
     };
 
             // body of AudioRecordThread::threadLoop()
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index 453c106..22ad57e 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -598,18 +598,20 @@
 
                 void        pause();    // suspend thread from execution at next loop boundary
                 void        resume();   // allow thread to execute, if not requested to exit
-                void        pauseConditional();
-                                        // like pause(), but only if prior resume() wasn't latched
 
     private:
+                void        pauseInternal(nsecs_t ns = 0LL);
+                                        // like pause(), but only used internally within thread
+
         friend class AudioTrack;
         virtual bool        threadLoop();
         AudioTrack&         mReceiver;
         virtual ~AudioTrackThread();
         Mutex               mMyLock;    // Thread::mLock is private
         Condition           mMyCond;    // Thread::mThreadExitedCondition is private
-        bool                mPaused;    // whether thread is currently paused
-        bool                mResumeLatch;   // whether next pauseConditional() will be a nop
+        bool                mPaused;    // whether thread is requested to pause at next loop entry
+        bool                mPausedInt; // whether thread internally requests pause
+        nsecs_t             mPausedNs;  // if mPausedInt then associated timeout, otherwise ignored
     };
 
             // body of AudioTrackThread::threadLoop()
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index e934a3e..fb731b9 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -105,6 +105,7 @@
         // Otherwise the callback thread will never exit.
         stop();
         if (mAudioRecordThread != 0) {
+            mProxy->interrupt();
             mAudioRecordThread->requestExit();  // see comment in AudioRecord.h
             mAudioRecordThread->requestExitAndWait();
             mAudioRecordThread.clear();
@@ -960,7 +961,7 @@
 // =========================================================================
 
 AudioRecord::AudioRecordThread::AudioRecordThread(AudioRecord& receiver, bool bCanCallJava)
-    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mResumeLatch(false)
+    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL)
 {
 }
 
@@ -977,25 +978,32 @@
             // caller will check for exitPending()
             return true;
         }
+        if (mPausedInt) {
+            mPausedInt = false;
+            if (mPausedNs > 0) {
+                (void) mMyCond.waitRelative(mMyLock, mPausedNs);
+            } else {
+                mMyCond.wait(mMyLock);
+            }
+            return true;
+        }
     }
     nsecs_t ns =  mReceiver.processAudioBuffer(this);
     switch (ns) {
     case 0:
         return true;
-    case NS_WHENEVER:
-        sleep(1);
-        return true;
     case NS_INACTIVE:
-        pauseConditional();
+        pauseInternal();
         return true;
     case NS_NEVER:
         return false;
+    case NS_WHENEVER:
+        // FIXME increase poll interval, or make event-driven
+        ns = 1000000000LL;
+        // fall through
     default:
         LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns);
-        struct timespec req;
-        req.tv_sec = ns / 1000000000LL;
-        req.tv_nsec = ns % 1000000000LL;
-        nanosleep(&req, NULL /*rem*/);
+        pauseInternal(ns);
         return true;
     }
 }
@@ -1004,24 +1012,18 @@
 {
     // must be in this order to avoid a race condition
     Thread::requestExit();
-    resume();
+    AutoMutex _l(mMyLock);
+    if (mPaused || mPausedInt) {
+        mPaused = false;
+        mPausedInt = false;
+        mMyCond.signal();
+    }
 }
 
 void AudioRecord::AudioRecordThread::pause()
 {
     AutoMutex _l(mMyLock);
     mPaused = true;
-    mResumeLatch = false;
-}
-
-void AudioRecord::AudioRecordThread::pauseConditional()
-{
-    AutoMutex _l(mMyLock);
-    if (mResumeLatch) {
-        mResumeLatch = false;
-    } else {
-        mPaused = true;
-    }
 }
 
 void AudioRecord::AudioRecordThread::resume()
@@ -1029,13 +1031,17 @@
     AutoMutex _l(mMyLock);
     if (mPaused) {
         mPaused = false;
-        mResumeLatch = false;
         mMyCond.signal();
-    } else {
-        mResumeLatch = true;
     }
 }
 
+void AudioRecord::AudioRecordThread::pauseInternal(nsecs_t ns)
+{
+    AutoMutex _l(mMyLock);
+    mPausedInt = true;
+    mPausedNs = ns;
+}
+
 // -------------------------------------------------------------------------
 
 }; // namespace android
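
The AudioRecordThread changes above replace the uninterruptible nanosleep()/pauseConditional() pattern with pauseInternal(): the thread records an internal pause request plus an optional timeout, and the loop honours it with a condition-variable wait that resume() or requestExit() can cut short. A minimal standalone sketch of that pattern, using std::mutex/std::condition_variable in place of the Android Thread/Condition classes (the class and method names here are illustrative):

    #include <chrono>
    #include <condition_variable>
    #include <mutex>

    class CallbackThreadState {
    public:
        // External pause request: the loop will block until resume().
        void pause() {
            std::lock_guard<std::mutex> l(mLock);
            mPaused = true;
        }
        // Internal pause request with an optional timeout (0 = wait indefinitely).
        void pauseInternal(long long ns = 0) {
            std::lock_guard<std::mutex> l(mLock);
            mPausedInt = true;
            mPausedNs = ns;
        }
        // Wake the loop immediately, cancelling any pending pause request.
        void resume() {
            std::lock_guard<std::mutex> l(mLock);
            if (mPaused || mPausedInt) {
                mPaused = false;
                mPausedInt = false;
                mCond.notify_one();
            }
        }
        // Called at the top of each loop iteration; unlike the nanosleep() it
        // replaces, the wait can be interrupted by resume().
        void maybeWait() {
            std::unique_lock<std::mutex> l(mLock);
            while (mPaused) {
                mCond.wait(l);
            }
            if (mPausedInt) {
                mPausedInt = false;
                if (mPausedNs > 0) {
                    mCond.wait_for(l, std::chrono::nanoseconds(mPausedNs));
                } else {
                    mCond.wait(l);
                }
            }
        }
    private:
        std::mutex mLock;
        std::condition_variable mCond;
        bool mPaused = false;       // external pause, cleared only by resume()
        bool mPausedInt = false;    // internal (possibly timed) pause
        long long mPausedNs = 0;    // timeout for the internal pause, in ns
    };

The same pattern is applied to AudioTrackThread in AudioTrack.cpp below.
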
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 15249a4..fdcf911 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -1782,7 +1782,7 @@
 // =========================================================================
 
 AudioTrack::AudioTrackThread::AudioTrackThread(AudioTrack& receiver, bool bCanCallJava)
-    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mResumeLatch(false)
+    : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL)
 {
 }
 
@@ -1799,25 +1799,32 @@
             // caller will check for exitPending()
             return true;
         }
+        if (mPausedInt) {
+            mPausedInt = false;
+            if (mPausedNs > 0) {
+                (void) mMyCond.waitRelative(mMyLock, mPausedNs);
+            } else {
+                mMyCond.wait(mMyLock);
+            }
+            return true;
+        }
     }
     nsecs_t ns = mReceiver.processAudioBuffer(this);
     switch (ns) {
     case 0:
         return true;
-    case NS_WHENEVER:
-        sleep(1);
-        return true;
     case NS_INACTIVE:
-        pauseConditional();
+        pauseInternal();
         return true;
     case NS_NEVER:
         return false;
+    case NS_WHENEVER:
+        // FIXME increase poll interval, or make event-driven
+        ns = 1000000000LL;
+        // fall through
     default:
         LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns);
-        struct timespec req;
-        req.tv_sec = ns / 1000000000LL;
-        req.tv_nsec = ns % 1000000000LL;
-        nanosleep(&req, NULL /*rem*/);
+        pauseInternal(ns);
         return true;
     }
 }
@@ -1826,24 +1833,18 @@
 {
     // must be in this order to avoid a race condition
     Thread::requestExit();
-    resume();
+    AutoMutex _l(mMyLock);
+    if (mPaused || mPausedInt) {
+        mPaused = false;
+        mPausedInt = false;
+        mMyCond.signal();
+    }
 }
 
 void AudioTrack::AudioTrackThread::pause()
 {
     AutoMutex _l(mMyLock);
     mPaused = true;
-    mResumeLatch = false;
-}
-
-void AudioTrack::AudioTrackThread::pauseConditional()
-{
-    AutoMutex _l(mMyLock);
-    if (mResumeLatch) {
-        mResumeLatch = false;
-    } else {
-        mPaused = true;
-    }
 }
 
 void AudioTrack::AudioTrackThread::resume()
@@ -1851,11 +1852,15 @@
     AutoMutex _l(mMyLock);
     if (mPaused) {
         mPaused = false;
-        mResumeLatch = false;
         mMyCond.signal();
-    } else {
-        mResumeLatch = true;
     }
 }
 
+void AudioTrack::AudioTrackThread::pauseInternal(nsecs_t ns)
+{
+    AutoMutex _l(mMyLock);
+    mPausedInt = true;
+    mPausedNs = ns;
+}
+
 }; // namespace android
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 8fbac42..f6e4c6a 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -264,6 +264,12 @@
         }
     }
 
+    result.append("Notification Clients:\n");
+    for (size_t i = 0; i < mNotificationClients.size(); ++i) {
+        snprintf(buffer, SIZE, "  pid: %d\n", mNotificationClients.keyAt(i));
+        result.append(buffer);
+    }
+
     result.append("Global session refs:\n");
     result.append(" session pid count\n");
     for (size_t i = 0; i < mAudioSessionRefs.size(); i++) {
@@ -1850,6 +1856,16 @@
     Mutex::Autolock _l(mLock);
     pid_t caller = IPCThreadState::self()->getCallingPid();
     ALOGV("acquiring %d from %d", audioSession, caller);
+
+    // Ignore requests received from processes not known as a notification client. The request
+    // is likely proxied by mediaserver (e.g. CameraService) and releaseAudioSessionId() can be
+    // called from a different pid, leaving a stale session reference. Also we don't know how
+    // to clear this reference if the client process dies.
+    if (mNotificationClients.indexOfKey(caller) < 0) {
+        ALOGV("acquireAudioSessionId() unknown client %d for session %d", caller, audioSession);
+        return;
+    }
+
     size_t num = mAudioSessionRefs.size();
     for (size_t i = 0; i< num; i++) {
         AudioSessionRef *ref = mAudioSessionRefs.editItemAt(i);
@@ -1882,7 +1898,9 @@
             return;
         }
     }
-    ALOGW("session id %d not found for pid %d", audioSession, caller);
+    // If the caller is mediaserver, the session being released was likely acquired on behalf
+    // of a process that is not a notification client, so we suppress the warning.
+    ALOGW_IF(caller != getpid_cached, "session id %d not found for pid %d", audioSession, caller);
 }
 
 void AudioFlinger::purgeStaleEffects_l() {
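
The acquireAudioSessionId() guard above only keeps per-pid session references for callers that are registered notification clients, since those are the only processes whose death AudioFlinger observes; anything else (typically a request proxied through mediaserver) is ignored rather than left behind as a stale reference. A minimal standalone sketch of that bookkeeping, with std containers standing in for the KeyedVector/Vector types (names are illustrative):

    #include <cstdio>
    #include <map>
    #include <set>
    #include <utility>

    // Illustrative stand-in for AudioFlinger's session-reference bookkeeping.
    struct SessionRefs {
        std::set<int> notificationClients;            // pids whose death we observe
        std::map<std::pair<int, int>, int> refs;      // (session, pid) -> ref count

        void acquire(int session, int pid) {
            // Refuse callers we cannot track; a dead, untracked client would
            // otherwise leave a stale reference behind forever.
            if (notificationClients.count(pid) == 0) {
                printf("ignoring unknown client %d for session %d\n", pid, session);
                return;
            }
            ++refs[{session, pid}];
        }

        void release(int session, int pid) {
            auto it = refs.find({session, pid});
            if (it == refs.end()) {
                return;    // e.g. acquired by proxy through mediaserver
            }
            if (--it->second == 0) {
                refs.erase(it);
            }
        }
    };
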
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 7d0ecac..5f36cab 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -960,6 +960,7 @@
         mUseAsyncWrite(false),
         mWriteAckSequence(0),
         mDrainSequence(0),
+        mSignalPending(false),
         mScreenState(AudioFlinger::mScreenState),
         // index 0 is reserved for normal mixer's submix
         mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1),
@@ -1348,14 +1349,14 @@
 {
     Mutex::Autolock _l(mLock);
     mStreamTypes[stream].volume = value;
-    signal_l();
+    broadcast_l();
 }
 
 void AudioFlinger::PlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted)
 {
     Mutex::Autolock _l(mLock);
     mStreamTypes[stream].mute = muted;
-    signal_l();
+    broadcast_l();
 }
 
 float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) const
@@ -1413,8 +1414,8 @@
         status = NO_ERROR;
     }
 
-    ALOGV("mWaitWorkCV.broadcast");
-    mWaitWorkCV.broadcast();
+    ALOGV("signal playback thread");
+    broadcast_l();
 
     return status;
 }
@@ -1455,14 +1456,14 @@
     }
 }
 
-void AudioFlinger::PlaybackThread::signal_l()
+void AudioFlinger::PlaybackThread::broadcast_l()
 {
     // Thread could be blocked waiting for async
     // so signal it to handle state changes immediately
     // If threadLoop is currently unlocked a signal of mWaitWorkCV will
     // be lost so we also flag to prevent it blocking on mWaitWorkCV
     mSignalPending = true;
-    mWaitWorkCV.signal();
+    mWaitWorkCV.broadcast();
 }
 
 String8 AudioFlinger::PlaybackThread::getParameters(const String8& keys)
@@ -2143,7 +2144,6 @@
             }
 
             saveOutputTracks();
-
             if (mSignalPending) {
                 // A signal was raised while we were unlocked
                 mSignalPending = false;
@@ -2158,10 +2158,10 @@
                 acquireWakeLock_l();
                 standbyTime = systemTime() + standbyDelay;
                 sleepTime = 0;
-                if (exitPending()) {
-                    break;
-                }
-            } else if ((!mActiveTracks.size() && systemTime() > standbyTime) ||
+
+                continue;
+            }
+            if ((!mActiveTracks.size() && systemTime() > standbyTime) ||
                                    isSuspended()) {
                 // put audio hardware into standby after short delay
                 if (shouldStandby_l()) {
@@ -2203,7 +2203,6 @@
                     continue;
                 }
             }
-
             // mMixerStatusIgnoringFastTracks is also updated internally
             mMixerStatus = prepareTracks_l(&tracksToRemove);
 
@@ -2340,6 +2339,22 @@
 
 }
 
+status_t AudioFlinger::PlaybackThread::getTimestamp_l(AudioTimestamp& timestamp)
+{
+    if (mNormalSink != 0) {
+        return mNormalSink->getTimestamp(timestamp);
+    }
+    if (mType == OFFLOAD && mOutput->stream->get_presentation_position) {
+        uint64_t position64;
+        int ret = mOutput->stream->get_presentation_position(
+                                                mOutput->stream, &position64, &timestamp.mTime);
+        if (ret == 0) {
+            timestamp.mPosition = (uint32_t)position64;
+            return NO_ERROR;
+        }
+    }
+    return INVALID_OPERATION;
+}
 // ----------------------------------------------------------------------------
 
 AudioFlinger::MixerThread::MixerThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
@@ -3855,13 +3870,14 @@
     Vector< sp<Track> > *tracksToRemove
 )
 {
-    ALOGV("OffloadThread::prepareTracks_l");
     size_t count = mActiveTracks.size();
 
     mixer_state mixerStatus = MIXER_IDLE;
     bool doHwPause = false;
     bool doHwResume = false;
 
+    ALOGV("OffloadThread::prepareTracks_l active tracks %d", count);
+
     // find out which tracks need to be processed
     for (size_t i = 0; i < count; i++) {
         sp<Track> t = mActiveTracks[i].promote();
@@ -3915,23 +3931,27 @@
                 // make sure processVolume_l() will apply new volume even if 0
                 mLeftVolFloat = mRightVolFloat = -1.0;
                 if (track->mState == TrackBase::RESUMING) {
-                    if (mPausedBytesRemaining) {
-                        // Need to continue write that was interrupted
-                        mCurrentWriteLength = mPausedWriteLength;
-                        mBytesRemaining = mPausedBytesRemaining;
-                        mPausedBytesRemaining = 0;
-                    }
                     track->mState = TrackBase::ACTIVE;
+                    if (last) {
+                        if (mPausedBytesRemaining) {
+                            // Need to continue write that was interrupted
+                            mCurrentWriteLength = mPausedWriteLength;
+                            mBytesRemaining = mPausedBytesRemaining;
+                            mPausedBytesRemaining = 0;
+                        }
+                        if (mHwPaused) {
+                            doHwResume = true;
+                            mHwPaused = false;
+                            // threadLoop_mix() will handle the case that we need to
+                            // resume an interrupted write
+                        }
+                        // enable write to audio HAL
+                        sleepTime = 0;
+                    }
                 }
             }
 
             if (last) {
-                if (mHwPaused) {
-                    doHwResume = true;
-                    mHwPaused = false;
-                    // threadLoop_mix() will handle the case that we need to
-                    // resume an interrupted write
-                }
                 // reset retry count
                 track->mRetryCount = kMaxTrackRetriesOffload;
                 mActiveTrack = t;
@@ -3948,9 +3968,9 @@
                     // has been written
                     ALOGV("OffloadThread: underrun and STOPPING_1 -> draining, STOPPING_2");
                     track->mState = TrackBase::STOPPING_2; // so presentation completes after drain
-                    sleepTime = 0;
-                    standbyTime = systemTime() + standbyDelay;
                     if (last) {
+                        sleepTime = 0;
+                        standbyTime = systemTime() + standbyDelay;
                         mixerStatus = MIXER_DRAIN_TRACK;
                         mDrainSequence += 2;
                         if (mHwPaused) {
@@ -4337,6 +4357,7 @@
                     mStandby = false;
                 }
             }
+
             lockEffectChains_l(effectChains);
         }
 
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 3fe470c..443b8d7 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -466,6 +466,8 @@
                 // Returns the HAL's frame count, i.e. the fast mixer buffer size.
                 size_t      frameCountHAL() const { return mFrameCount; }
 
+                status_t         getTimestamp_l(AudioTimestamp& timestamp);
+
 protected:
     // updated by readOutputParameters()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -526,7 +528,7 @@
     status_t    addTrack_l(const sp<Track>& track);
     bool        destroyTrack_l(const sp<Track>& track);
     void        removeTrack_l(const sp<Track>& track);
-    void        signal_l();
+    void        broadcast_l();
 
     void        readOutputParameters();
 
@@ -590,6 +592,8 @@
     // Bit 0 is reset by the async callback thread calling resetDraining(). Out of sequence
     // callbacks are ignored.
     uint32_t                        mDrainSequence;
+    // A condition that must be evaluated by prepareTracks_l() has changed and we must not wait
+    // for the async write callback in the thread loop before evaluating it
     bool                            mSignalPending;
     sp<AsyncCallbackThread>         mCallbackThread;
 
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 57aad1e..d8d325d 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -651,7 +651,7 @@
         case RESUMING:
             mState = PAUSING;
             ALOGV("ACTIVE/RESUMING => PAUSING (%d) on thread %p", mName, thread.get());
-            playbackThread->signal_l();
+            playbackThread->broadcast_l();
             break;
 
         default:
@@ -711,7 +711,7 @@
         // before mixer thread can run. This is important when offloading
         // because the hardware buffer could hold a large amount of audio
         playbackThread->flushOutput_l();
-        playbackThread->signal_l();
+        playbackThread->broadcast_l();
     }
 }
 
@@ -757,19 +757,23 @@
     }
     Mutex::Autolock _l(thread->mLock);
     PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
-    if (!playbackThread->mLatchQValid) {
-        return INVALID_OPERATION;
+    if (!isOffloaded()) {
+        if (!playbackThread->mLatchQValid) {
+            return INVALID_OPERATION;
+        }
+        uint32_t unpresentedFrames =
+                ((int64_t) playbackThread->mLatchQ.mUnpresentedFrames * mSampleRate) /
+                playbackThread->mSampleRate;
+        uint32_t framesWritten = mAudioTrackServerProxy->framesReleased();
+        if (framesWritten < unpresentedFrames) {
+            return INVALID_OPERATION;
+        }
+        timestamp.mPosition = framesWritten - unpresentedFrames;
+        timestamp.mTime = playbackThread->mLatchQ.mTimestamp.mTime;
+        return NO_ERROR;
     }
-    uint32_t unpresentedFrames =
-            ((int64_t) playbackThread->mLatchQ.mUnpresentedFrames * mSampleRate) /
-            playbackThread->mSampleRate;
-    uint32_t framesWritten = mAudioTrackServerProxy->framesReleased();
-    if (framesWritten < unpresentedFrames) {
-        return INVALID_OPERATION;
-    }
-    timestamp.mPosition = framesWritten - unpresentedFrames;
-    timestamp.mTime = playbackThread->mLatchQ.mTimestamp.mTime;
-    return NO_ERROR;
+
+    return playbackThread->getTimestamp_l(timestamp);
 }
 
 status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId)
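
For non-offloaded tracks, Track::getTimestamp() still derives the position from the latched kernel timestamp: the unpresented frame count is rescaled from the thread's sample rate to the track's and subtracted from the frames the server has released, while offloaded tracks now delegate to PlaybackThread::getTimestamp_l() instead. A standalone sketch of the non-offload arithmetic (names and the sample values are illustrative):

    #include <cstdint>
    #include <cstdio>

    // Returns false when fewer frames have been released than are still queued,
    // in which case the caller reports INVALID_OPERATION.
    static bool trackTimestampPosition(uint32_t framesReleased,    // track frames consumed by the server
                                       uint32_t unpresentedThread, // thread frames not yet presented
                                       uint32_t trackRate,         // track sample rate
                                       uint32_t threadRate,        // mixer/thread sample rate
                                       uint32_t* position) {
        uint32_t unpresentedTrack =
                (uint32_t)(((int64_t)unpresentedThread * trackRate) / threadRate);
        if (framesReleased < unpresentedTrack) {
            return false;
        }
        *position = framesReleased - unpresentedTrack;
        return true;
    }

    int main() {
        uint32_t pos;
        if (trackTimestampPosition(96000, 2048, 44100, 48000, &pos)) {
            printf("track position: %u frames\n", pos);   // 96000 - 1881 = 94119
        }
        return 0;
    }
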
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index e7f6c53..fb81468 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -326,6 +326,10 @@
     result.appendFormat("    Video stabilization is %s\n",
             p.videoStabilization ? "enabled" : "disabled");
 
+    result.appendFormat("    Selected still capture FPS range: %d - %d\n",
+            p.fastInfo.bestStillCaptureFpsRange[0],
+            p.fastInfo.bestStillCaptureFpsRange[1]);
+
     result.append("  Current streams:\n");
     result.appendFormat("    Preview stream ID: %d\n",
             getPreviewStreamId());
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index ad55feb..0705791 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -852,6 +852,33 @@
         arrayHeight = activeArraySize.data.i32[3];
     } else return NO_INIT;
 
+    // We'll set the target FPS range for still captures to be as wide
+    // as possible to give the HAL maximum latitude for exposure selection
+    camera_metadata_ro_entry_t availableFpsRanges =
+        staticInfo(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 2);
+    if (availableFpsRanges.count < 2 || availableFpsRanges.count % 2 != 0) {
+        return NO_INIT;
+    }
+
+    int32_t bestStillCaptureFpsRange[2] = {
+        availableFpsRanges.data.i32[0], availableFpsRanges.data.i32[1]
+    };
+    int32_t curRange =
+            bestStillCaptureFpsRange[1] - bestStillCaptureFpsRange[0];
+    for (size_t i = 2; i < availableFpsRanges.count; i += 2) {
+        int32_t nextRange =
+                availableFpsRanges.data.i32[i + 1] -
+                availableFpsRanges.data.i32[i];
+        if ( (nextRange > curRange) ||       // Maximize size of FPS range first
+                (nextRange == curRange &&    // Then minimize low-end FPS
+                 bestStillCaptureFpsRange[0] > availableFpsRanges.data.i32[i])) {
+
+            bestStillCaptureFpsRange[0] = availableFpsRanges.data.i32[i];
+            bestStillCaptureFpsRange[1] = availableFpsRanges.data.i32[i + 1];
+            curRange = nextRange;
+        }
+    }
+
     camera_metadata_ro_entry_t availableFaceDetectModes =
         staticInfo(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 0, 0,
                 false);
@@ -971,6 +998,8 @@
 
     fastInfo.arrayWidth = arrayWidth;
     fastInfo.arrayHeight = arrayHeight;
+    fastInfo.bestStillCaptureFpsRange[0] = bestStillCaptureFpsRange[0];
+    fastInfo.bestStillCaptureFpsRange[1] = bestStillCaptureFpsRange[1];
     fastInfo.bestFaceDetectMode = bestFaceDetectMode;
     fastInfo.maxFaces = maxFaces;
 
@@ -1709,8 +1738,15 @@
             &metadataMode, 1);
     if (res != OK) return res;
 
-    res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
-            previewFpsRange, 2);
+    camera_metadata_entry_t intent =
+            request->find(ANDROID_CONTROL_CAPTURE_INTENT);
+    if (intent.data.u8[0] == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
+        res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                fastInfo.bestStillCaptureFpsRange, 2);
+    } else {
+        res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                previewFpsRange, 2);
+    }
     if (res != OK) return res;
 
     uint8_t reqWbLock = autoWhiteBalanceLock ?
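
The Parameters.cpp change scans the flattened (min, max) pairs in ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, keeping the widest range and breaking ties in favour of the lower minimum, so the HAL has maximum exposure latitude for still captures. A standalone sketch of that scan (the function name and sample ranges are illustrative):

    #include <cstdint>
    #include <cstdio>

    // ranges is a flat array [min0, max0, min1, max1, ...]; count is even and >= 2.
    static void bestStillFpsRange(const int32_t* ranges, size_t count, int32_t out[2]) {
        out[0] = ranges[0];
        out[1] = ranges[1];
        int32_t curSpan = out[1] - out[0];
        for (size_t i = 2; i + 1 < count; i += 2) {
            int32_t span = ranges[i + 1] - ranges[i];
            if (span > curSpan ||                               // maximize range width first
                    (span == curSpan && ranges[i] < out[0])) {  // then minimize the low end
                out[0] = ranges[i];
                out[1] = ranges[i + 1];
                curSpan = span;
            }
        }
    }

    int main() {
        const int32_t avail[] = { 15, 15, 24, 24, 7, 30, 30, 30 };
        int32_t best[2];
        bestStillFpsRange(avail, sizeof(avail) / sizeof(avail[0]), best);
        printf("best still FPS range: %d - %d\n", best[0], best[1]);  // prints "7 - 30"
        return 0;
    }
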
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index a7111a3..b9ca7bf 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -179,6 +179,7 @@
     struct DeviceInfo {
         int32_t arrayWidth;
         int32_t arrayHeight;
+        int32_t bestStillCaptureFpsRange[2];
         uint8_t bestFaceDetectMode;
         int32_t maxFaces;
         struct OverrideModes {
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 055ea12..83466cb 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -360,6 +360,26 @@
 
         ALOGV("%s: Camera %d: Successfully created a new stream ID %d",
               __FUNCTION__, mCameraId, streamId);
+
+        /**
+         * Set the stream transform flags to automatically
+         * rotate the camera stream for preview use cases.
+         */
+        int32_t transform = 0;
+        res = getRotationTransformLocked(&transform);
+
+        if (res != OK) {
+            // Error logged by getRotationTransformLocked.
+            return res;
+        }
+
+        res = mDevice->setStreamTransform(streamId, transform);
+        if (res != OK) {
+            ALOGE("%s: Failed to set stream transform (stream id %d)",
+                  __FUNCTION__, streamId);
+            return res;
+        }
+
         return streamId;
     }
 
@@ -560,4 +580,64 @@
     return true;
 }
 
+status_t CameraDeviceClient::getRotationTransformLocked(int32_t* transform) {
+    ALOGV("%s: begin", __FUNCTION__);
+
+    if (transform == NULL) {
+        ALOGW("%s: null transform", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    *transform = 0;
+
+    const CameraMetadata& staticInfo = mDevice->info();
+    camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_SENSOR_ORIENTATION);
+    if (entry.count == 0) {
+        ALOGE("%s: Camera %d: Can't find android.sensor.orientation in "
+                "static metadata!", __FUNCTION__, mCameraId);
+        return INVALID_OPERATION;
+    }
+
+    int32_t& flags = *transform;
+
+    int orientation = entry.data.i32[0];
+    switch (orientation) {
+        case 0:
+            flags = 0;
+            break;
+        case 90:
+            flags = NATIVE_WINDOW_TRANSFORM_ROT_90;
+            break;
+        case 180:
+            flags = NATIVE_WINDOW_TRANSFORM_ROT_180;
+            break;
+        case 270:
+            flags = NATIVE_WINDOW_TRANSFORM_ROT_270;
+            break;
+        default:
+            ALOGE("%s: Invalid HAL android.sensor.orientation value: %d",
+                  __FUNCTION__, orientation);
+            return INVALID_OPERATION;
+    }
+
+    /**
+     * This magic flag makes surfaceflinger un-rotate the buffers
+     * to counter the extra global device UI rotation whenever the user
+     * physically rotates the device.
+     *
+     * By doing this, the camera buffer always ends up aligned
+     * with the physical camera for a "see through" effect.
+     *
+     * In essence, the buffer only gets rotated during preview use-cases.
+     * The user is still responsible for re-creating streams with the proper
+     * aspect ratio, or the preview will end up looking non-uniformly
+     * stretched.
+     */
+    flags |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY;
+
+    ALOGV("%s: final transform = 0x%x", __FUNCTION__, flags);
+
+    return OK;
+}
+
 } // namespace android
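
getRotationTransformLocked() maps the sensor's mounting orientation to the corresponding ANativeWindow rotation flag and ORs in the inverse-display bit so SurfaceFlinger undoes the device UI rotation for preview. A standalone sketch of that mapping; the enum values below are assumed to mirror the NATIVE_WINDOW_TRANSFORM_* constants in system/window.h:

    #include <cstdint>
    #include <cstdio>

    // Assumed to match the ANativeWindow transform flags in system/window.h.
    enum {
        TRANSFORM_ROT_90          = 0x04,
        TRANSFORM_ROT_180         = 0x03,
        TRANSFORM_ROT_270         = 0x07,
        TRANSFORM_INVERSE_DISPLAY = 0x08,
    };

    // Returns -1 for an orientation the HAL should never report.
    static int32_t rotationTransform(int orientationDegrees) {
        int32_t flags;
        switch (orientationDegrees) {
            case 0:   flags = 0;                 break;
            case 90:  flags = TRANSFORM_ROT_90;  break;
            case 180: flags = TRANSFORM_ROT_180; break;
            case 270: flags = TRANSFORM_ROT_270; break;
            default:  return -1;
        }
        // Also ask SurfaceFlinger to undo the device UI rotation, so the
        // preview stays aligned with the physical sensor.
        return flags | TRANSFORM_INVERSE_DISPLAY;
    }

    int main() {
        printf("90-degree sensor -> transform 0x%x\n", rotationTransform(90));  // 0xc
        return 0;
    }
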
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index c6b6336..b490924 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -120,6 +120,9 @@
                                            const CameraMetadata& frame);
     virtual void          detachDevice();
 
+    // Calculate the ANativeWindow transform from android.sensor.orientation
+    status_t              getRotationTransformLocked(/*out*/int32_t* transform);
+
 private:
     /** ICameraDeviceUser interface-related private members */