Merge "Passing device name up through getDeviceDescriptor() method (when creating)"
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
index 54a4e8b..d448097 100644
--- a/include/media/stagefright/MediaCodec.h
+++ b/include/media/stagefright/MediaCodec.h
@@ -194,7 +194,7 @@
     };
 
     enum {
-        kFlagIsSoftwareCodec            = 1,
+        kFlagUsesSoftwareRenderer       = 1,
         kFlagOutputFormatChanged        = 2,
         kFlagOutputBuffersChanged       = 4,
         kFlagStickyError                = 8,
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index b175055..f2ff27b 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -73,6 +73,8 @@
     REGISTER_POLICY_MIXES,
 };
 
+#define MAX_ITEMS_PER_LIST 1024
+
 class BpAudioPolicyService : public BpInterface<IAudioPolicyService>
 {
 public:
@@ -1058,10 +1060,18 @@
             audio_port_role_t role = (audio_port_role_t)data.readInt32();
             audio_port_type_t type = (audio_port_type_t)data.readInt32();
             unsigned int numPortsReq = data.readInt32();
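+            // cap the count read from the parcel; it comes from an untrusted client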
+            if (numPortsReq > MAX_ITEMS_PER_LIST) {
+                numPortsReq = MAX_ITEMS_PER_LIST;
+            }
             unsigned int numPorts = numPortsReq;
-            unsigned int generation;
             struct audio_port *ports =
                     (struct audio_port *)calloc(numPortsReq, sizeof(struct audio_port));
+            if (ports == NULL) {
+                reply->writeInt32(NO_MEMORY);
+                reply->writeInt32(0);
+                return NO_ERROR;
+            }
+            unsigned int generation;
             status_t status = listAudioPorts(role, type, &numPorts, ports, &generation);
             reply->writeInt32(status);
             reply->writeInt32(numPorts);
@@ -1115,11 +1125,19 @@
         case LIST_AUDIO_PATCHES: {
             CHECK_INTERFACE(IAudioPolicyService, data, reply);
             unsigned int numPatchesReq = data.readInt32();
+            if (numPatchesReq > MAX_ITEMS_PER_LIST) {
+                numPatchesReq = MAX_ITEMS_PER_LIST;
+            }
             unsigned int numPatches = numPatchesReq;
-            unsigned int generation;
             struct audio_patch *patches =
                     (struct audio_patch *)calloc(numPatchesReq,
                                                  sizeof(struct audio_patch));
+            if (patches == NULL) {
+                reply->writeInt32(NO_MEMORY);
+                reply->writeInt32(0);
+                return NO_ERROR;
+            }
+            unsigned int generation;
             status_t status = listAudioPatches(&numPatches, patches, &generation);
             reply->writeInt32(status);
             reply->writeInt32(numPatches);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 1f55706..fb8dbce 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1201,7 +1201,9 @@
         CHECK(format->findString("mime", &mime));
 
         sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, id());
-        mCCDecoder = new CCDecoder(ccNotify);
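+        // create the CCDecoder only once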
+        if (mCCDecoder == NULL) {
+            mCCDecoder = new CCDecoder(ccNotify);
+        }
 
         if (mSourceFlags & Source::FLAG_SECURE) {
             format->setInt32("secure", true);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 6a84c11..3751175 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -669,6 +669,7 @@
         // XXX: Currently this error is logged, but not fatal.
         usage = 0;
     }
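+    // keep the component-requested usage bits for the log message below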
+    int omxUsage = usage;
 
     if (mFlags & kFlagIsGrallocUsageProtected) {
         usage |= GRALLOC_USAGE_PROTECTED;
@@ -693,6 +694,18 @@
         }
     }
 
+    int consumerUsage = 0;
+    err = mNativeWindow->query(
+            mNativeWindow.get(), NATIVE_WINDOW_CONSUMER_USAGE_BITS,
+            &consumerUsage);
+    if (err != 0) {
+        ALOGW("failed to get consumer usage bits. ignoring");
+        err = 0;
+    }
+
+    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec) + %#x(Consumer) = %#x",
+            omxUsage, usage, consumerUsage, usage | consumerUsage);
+    usage |= consumerUsage;
     err = native_window_set_usage(
             mNativeWindow.get(),
             usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
@@ -1257,9 +1270,10 @@
         }
     }
 
+    // NOTE: we only use native window for video decoders
     sp<RefBase> obj;
     bool haveNativeWindow = msg->findObject("native-window", &obj)
-            && obj != NULL;
+            && obj != NULL && video && !encoder;
     mStoreMetaDataInOutputBuffers = false;
     if (video && !encoder) {
         inputFormat->setInt32("adaptive-playback", false);
@@ -1274,7 +1288,7 @@
             mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
         }
     }
-    if (!encoder && video && haveNativeWindow) {
+    if (haveNativeWindow) {
         sp<NativeWindowWrapper> windowWrapper(
                 static_cast<NativeWindowWrapper *>(obj.get()));
         sp<ANativeWindow> nativeWindow = windowWrapper->getNativeWindow();
@@ -1323,6 +1337,8 @@
                 if (err != OK) {
                     ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d",
                             mComponentName.c_str(), err);
+                    // allow failure
+                    err = OK;
                 } else {
                     inputFormat->setInt32("max-width", maxWidth);
                     inputFormat->setInt32("max-height", maxHeight);
@@ -1416,11 +1432,80 @@
     }
 
     if (video) {
+        // determine need for software renderer
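+        // (software codecs output into byte buffers; MediaCodec renders those
+        // through SoftwareRenderer rather than handing the window to the codec)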
+        bool usingSwRenderer = false;
+        if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) {
+            usingSwRenderer = true;
+            haveNativeWindow = false;
+        }
+
         if (encoder) {
             err = setupVideoEncoder(mime, msg);
         } else {
             err = setupVideoDecoder(mime, msg, haveNativeWindow);
         }
+
+        if (err != OK) {
+            return err;
+        }
+
+        if (haveNativeWindow) {
+            sp<NativeWindowWrapper> nativeWindow(
+                    static_cast<NativeWindowWrapper *>(obj.get()));
+            CHECK(nativeWindow != NULL);
+            mNativeWindow = nativeWindow->getNativeWindow();
+
+            native_window_set_scaling_mode(
+                    mNativeWindow.get(), NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
+        }
+
+        // initialize native window now to get actual output format
+        // TODO: this is needed for some encoders even though they don't use native window
+        CHECK_EQ((status_t)OK, initNativeWindow());
+
+        // fallback for devices that do not handle flex-YUV for native buffers
+        if (haveNativeWindow) {
+            int32_t requestedColorFormat = OMX_COLOR_FormatUnused;
+            if (msg->findInt32("color-format", &requestedColorFormat) &&
+                    requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) {
+                CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK);
+                int32_t colorFormat = OMX_COLOR_FormatUnused;
+                OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused;
+                CHECK(outputFormat->findInt32("color-format", &colorFormat));
+                ALOGD("[%s] Requested output format %#x and got %#x.",
+                        mComponentName.c_str(), requestedColorFormat, colorFormat);
+                if (!isFlexibleColorFormat(
+                                mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent)
+                        || flexibleEquivalent != (OMX_U32)requestedColorFormat) {
+                    // device did not handle flex-YUV request for native window, fall back
+                    // to SW renderer
+                    ALOGI("[%s] Falling back to software renderer", mComponentName.c_str());
+                    mNativeWindow.clear();
+                    haveNativeWindow = false;
+                    usingSwRenderer = true;
+                    if (mStoreMetaDataInOutputBuffers) {
+                        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_FALSE);
+                        mStoreMetaDataInOutputBuffers = false;
+                        // TODO: implement adaptive-playback support for bytebuffer mode.
+                        // This is done by SW codecs, but most HW codecs don't support it.
+                        inputFormat->setInt32("adaptive-playback", false);
+                    }
+                    if (err == OK) {
+                        err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE);
+                    }
+                    if (mFlags & kFlagIsGrallocUsageProtected) {
+                        // fallback is not supported for protected playback
+                        err = PERMISSION_DENIED;
+                    } else if (err == OK) {
+                        err = setupVideoDecoder(mime, msg, false);
+                    }
+                }
+            }
+        }
+
+        if (usingSwRenderer) {
+            outputFormat->setInt32("using-sw-renderer", 1);
+        }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
         int32_t numChannels, sampleRate;
         if (!msg->findInt32("channel-count", &numChannels)
@@ -3198,7 +3283,8 @@
     if (fmt != OMX_COLOR_FormatYUV420Planar &&
         fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
         fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
-        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar) {
+        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
+        fmt != HAL_PIXEL_FORMAT_YV12) {
         ALOGW("do not know color format 0x%x = %d", fmt, fmt);
         return false;
     }
@@ -3227,8 +3313,31 @@
     image.mPlane[image.Y].mHorizSubsampling = 1;
     image.mPlane[image.Y].mVertSubsampling = 1;
 
-    switch (fmt) {
-        case OMX_COLOR_FormatYUV420Planar: // used for YV12
+    switch ((int)fmt) {
+        case HAL_PIXEL_FORMAT_YV12:
+            if (params.bUsingNativeBuffers) {
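+                // gralloc YV12 layout: Y plane, then Cr (V), then Cb (U), with
+                // each plane's row stride aligned to 16 bytes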
+                size_t ystride = align(params.nStride, 16);
+                size_t cstride = align(params.nStride / 2, 16);
+                image.mPlane[image.Y].mRowInc = ystride;
+
+                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
+                image.mPlane[image.V].mColInc = 1;
+                image.mPlane[image.V].mRowInc = cstride;
+                image.mPlane[image.V].mHorizSubsampling = 2;
+                image.mPlane[image.V].mVertSubsampling = 2;
+
+                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
+                        + (cstride * params.nSliceHeight / 2);
+                image.mPlane[image.U].mColInc = 1;
+                image.mPlane[image.U].mRowInc = cstride;
+                image.mPlane[image.U].mHorizSubsampling = 2;
+                image.mPlane[image.U].mVertSubsampling = 2;
+                break;
+            } else {
+                // fall through: some codecs report YV12 but use the YUV420Planar layout
+            }
+
+        case OMX_COLOR_FormatYUV420Planar:
         case OMX_COLOR_FormatYUV420PackedPlanar:
             image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
             image.mPlane[image.U].mColInc = 1;
@@ -3371,6 +3480,13 @@
                                     ABuffer::CreateAsCopy(
                                             &describeParams.sMediaImage,
                                             sizeof(describeParams.sMediaImage)));
+
+                            MediaImage *img = &describeParams.sMediaImage;
+                            ALOGV("[%s] MediaImage { F(%zux%zu) @%zu+%zu+%zu @%zu+%zu+%zu @%zu+%zu+%zu }",
+                                    mComponentName.c_str(), img->mWidth, img->mHeight,
+                                    img->mPlane[0].mOffset, img->mPlane[0].mColInc, img->mPlane[0].mRowInc,
+                                    img->mPlane[1].mOffset, img->mPlane[1].mColInc, img->mPlane[1].mRowInc,
+                                    img->mPlane[2].mOffset, img->mPlane[2].mColInc, img->mPlane[2].mRowInc);
                         }
                     }
 
@@ -3468,9 +3584,12 @@
                     break;
                 }
             }
-
             notify->setInt32("width", videoDef->nFrameWidth);
             notify->setInt32("height", videoDef->nFrameHeight);
+            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
+                    portIndex == kPortIndexInput ? "input" : "output",
+                    notify->debugString().c_str());
+
             break;
         }
 
@@ -4919,20 +5038,6 @@
         return false;
     }
 
-    sp<RefBase> obj;
-    if (msg->findObject("native-window", &obj)
-            && strncmp("OMX.google.", mCodec->mComponentName.c_str(), 11)) {
-        sp<NativeWindowWrapper> nativeWindow(
-                static_cast<NativeWindowWrapper *>(obj.get()));
-        CHECK(nativeWindow != NULL);
-        mCodec->mNativeWindow = nativeWindow->getNativeWindow();
-
-        native_window_set_scaling_mode(
-                mCodec->mNativeWindow.get(),
-                NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
-    }
-    CHECK_EQ((status_t)OK, mCodec->initNativeWindow());
-
     {
         sp<AMessage> notify = mCodec->mNotify->dup();
         notify->setInt32("what", CodecBase::kWhatComponentConfigured);
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index c838427..50e6bd0 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -879,9 +879,9 @@
                     CHECK(msg->findString("componentName", &mComponentName));
 
                     if (mComponentName.startsWith("OMX.google.")) {
-                        mFlags |= kFlagIsSoftwareCodec;
+                        mFlags |= kFlagUsesSoftwareRenderer;
                     } else {
-                        mFlags &= ~kFlagIsSoftwareCodec;
+                        mFlags &= ~kFlagUsesSoftwareRenderer;
                     }
 
                     if (mComponentName.endsWith(".secure")) {
@@ -904,6 +904,11 @@
                     CHECK(msg->findMessage("input-format", &mInputFormat));
                     CHECK(msg->findMessage("output-format", &mOutputFormat));
 
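+                    // ACodec sets "using-sw-renderer" on the output format when it
+                    // falls back to the software renderer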
+                    int32_t usingSwRenderer;
+                    if (mOutputFormat->findInt32("using-sw-renderer", &usingSwRenderer)
+                            && usingSwRenderer) {
+                        mFlags |= kFlagUsesSoftwareRenderer;
+                    }
                     setState(CONFIGURED);
                     (new AMessage)->postReply(mReplyID);
                     break;
@@ -999,7 +1004,7 @@
 
                     if (mSoftRenderer == NULL &&
                             mNativeWindow != NULL &&
-                            (mFlags & kFlagIsSoftwareCodec)) {
+                            (mFlags & kFlagUsesSoftwareRenderer)) {
                         AString mime;
                         CHECK(msg->findString("mime", &mime));
 
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 6474906..21da707 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -107,6 +107,7 @@
     if (!runningInEmulator()) {
         switch (mColorFormat) {
             case OMX_COLOR_FormatYUV420Planar:
+            case OMX_COLOR_FormatYUV420SemiPlanar:
             case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
             {
                 halFormat = HAL_PIXEL_FORMAT_YV12;
@@ -255,7 +256,8 @@
             dst_u += dst_c_stride;
             dst_v += dst_c_stride;
         }
-    } else if (mColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
+    } else if (mColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
+            || mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
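+        // both semi-planar formats carry interleaved UV and share this copy path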
         const uint8_t *src_y = (const uint8_t *)data;
         const uint8_t *src_uv = (const uint8_t *)data
                 + mWidth * (mHeight - mCropTop / 2);
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index eb62c7a..997b694 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -35,6 +35,7 @@
         TYPE_AUDIO,
         TYPE_VIDEO,
         TYPE_SUBS,
+        TYPE_CC,
     };
 
     enum FlagBits {
@@ -991,6 +992,8 @@
                 groupType = MediaGroup::TYPE_AUDIO;
             } else if (!strcasecmp("video", val.c_str())) {
                 groupType = MediaGroup::TYPE_VIDEO;
+            } else if (!strcasecmp("closed-captions", val.c_str())) {
+                groupType = MediaGroup::TYPE_CC;
             } else {
                 ALOGE("Invalid media group type '%s'", val.c_str());
                 return ERROR_MALFORMED;
@@ -1103,6 +1106,13 @@
         return ERROR_MALFORMED;
     }
 
+    if (groupType == MediaGroup::TYPE_CC) {
+        // TODO: handle this group properly. For now it is ignored here: the CC
+        // track is detected by CCDecoder, but the group's flags (lang, auto)
+        // still need to be passed up to the app.
+        return OK;
+    }
+
     uint32_t flags = 0;
     if (haveGroupAutoselect && groupAutoselect) {
         flags |= MediaGroup::FLAG_AUTOSELECT;