DO NOT MERGE - Merge tm-qpr2-release into tm-platform-merge

Bug: 269346300
Change-Id: I26134432454b0554dd378457de59e16bc5fe6b5f
diff --git a/camera/Android.bp b/camera/Android.bp
index e44202b..3e28e4f 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -93,6 +93,7 @@
         "libgui",
         "libcamera_metadata",
         "libnativewindow",
+        "lib-platform-compat-native-api",
     ],
 
     include_dirs: [
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 604dbb8..d1618e4 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -71,10 +71,10 @@
 }
 
 sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
-        int clientUid, int clientPid, int targetSdkVersion)
+        int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait)
 {
     return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
-            clientPid, targetSdkVersion);
+            clientPid, targetSdkVersion, overrideToPortrait);
 }
 
 status_t Camera::reconnect()
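
For reference, a minimal caller sketch of the widened legacy-API signatures (the package name and flag value are illustrative, not part of this change; the UID/PID constants are the same ones the binder tests below use):

    #include <camera/Camera.h>
    #include <android/hardware/ICameraService.h>

    using namespace android;

    sp<Camera> openCamera0() {
        // false keeps the unrotated sensor orientation; CameraSource.cpp below passes true.
        sp<Camera> camera = Camera::connect(/*cameraId*/ 0,
                String16("com.example.app"),                     // assumed package name
                hardware::ICameraService::USE_CALLING_UID,
                hardware::ICameraService::USE_CALLING_PID,
                /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                /*overrideToPortrait*/ false);
        hardware::CameraInfo info;
        // getCameraInfo() takes the same flag now.
        (void)Camera::getCameraInfo(/*cameraId*/ 0, /*overrideToPortrait*/ false, &info);
        return camera;
    }
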
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 24c9108..0a5bc12 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -23,6 +23,7 @@
 #include <cutils/properties.h>
 
 #include <android/hardware/ICameraService.h>
+#include <com/android/internal/compat/IPlatformCompatNative.h>
 
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
@@ -161,7 +162,8 @@
 template <typename TCam, typename TCamTraits>
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
                                                const String16& clientPackageName,
-                                               int clientUid, int clientPid, int targetSdkVersion)
+                                               int clientUid, int clientPid, int targetSdkVersion,
+                                               bool overrideToPortrait)
 {
     ALOGV("%s: connect", __FUNCTION__);
     sp<TCam> c = new TCam(cameraId);
@@ -171,8 +173,9 @@
     binder::Status ret;
     if (cs != nullptr) {
         TCamConnectService fnConnectService = TCamTraits::fnConnectService;
+        ALOGI("Connect camera (legacy API) - overrideToPortrait %d", overrideToPortrait);
         ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
-                                               clientPid, targetSdkVersion, /*out*/ &c->mCamera);
+                clientPid, targetSdkVersion, overrideToPortrait, /*out*/ &c->mCamera);
     }
     if (ret.isOk() && c->mCamera != nullptr) {
         IInterface::asBinder(c->mCamera)->linkToDeath(c);
@@ -273,10 +276,11 @@
 // this can be in BaseCamera but it should be an instance method
 template <typename TCam, typename TCamTraits>
 status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
+        bool overrideToPortrait,
         struct hardware::CameraInfo* cameraInfo) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
     if (cs == 0) return UNKNOWN_ERROR;
-    binder::Status res = cs->getCameraInfo(cameraId, cameraInfo);
+    binder::Status res = cs->getCameraInfo(cameraId, overrideToPortrait, cameraInfo);
     return res.isOk() ? OK : res.serviceSpecificErrorCode();
 }
 
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 1e748c7..01baba1 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -67,7 +67,7 @@
     /**
      * Fetch basic camera information for a camera device
      */
-    CameraInfo getCameraInfo(int cameraId);
+    CameraInfo getCameraInfo(int cameraId, boolean overrideToPortrait);
 
     /**
      * Default UID/PID values for non-privileged callers of
@@ -83,7 +83,8 @@
             int cameraId,
             String opPackageName,
             int clientUid, int clientPid,
-            int targetSdkVersion);
+            int targetSdkVersion,
+            boolean overrideToPortrait);
 
     /**
      * Open a camera device through the new camera API
@@ -94,7 +95,8 @@
             String opPackageName,
             @nullable String featureId,
             int clientUid, int oomScoreOffset,
-            int targetSdkVersion);
+            int targetSdkVersion,
+            boolean overrideToPortrait);
 
     /**
      * Add listener for changes to camera device and flashlight state.
@@ -135,7 +137,8 @@
      * Read the static camera metadata for a camera device.
      * Only supported for device HAL versions >= 3.2
      */
-    CameraMetadataNative getCameraCharacteristics(String cameraId, int targetSdkVersion);
+    CameraMetadataNative getCameraCharacteristics(String cameraId, int targetSdkVersion,
+            boolean overrideToPortrait);
 
     /**
      * Read in the vendor tag descriptors from the camera module HAL.
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 58ccd69..26c36a7 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -58,7 +58,7 @@
     typedef ::android::hardware::ICameraClient TCamCallbacks;
     typedef ::android::binder::Status(::android::hardware::ICameraService::*TCamConnectService)
         (const sp<::android::hardware::ICameraClient>&,
-        int, const String16&, int, int, int,
+        int, const String16&, int, int, int, bool,
         /*out*/
         sp<::android::hardware::ICamera>*);
     static TCamConnectService     fnConnectService;
@@ -81,7 +81,8 @@
     static  sp<Camera>  create(const sp<::android::hardware::ICamera>& camera);
     static  sp<Camera>  connect(int cameraId,
                                 const String16& clientPackageName,
-                                int clientUid, int clientPid, int targetSdkVersion);
+                                int clientUid, int clientPid, int targetSdkVersion,
+                                bool overrideToPortrait);
 
             virtual     ~Camera();
 
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 8e53968..9d0721b 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -119,7 +119,8 @@
 
     static sp<TCam>      connect(int cameraId,
                                  const String16& clientPackageName,
-                                 int clientUid, int clientPid, int targetSdkVersion);
+                                 int clientUid, int clientPid, int targetSdkVersion,
+                                 bool overrideToPortrait);
     virtual void         disconnect();
 
     void                 setListener(const sp<TCamListener>& listener);
@@ -127,6 +128,7 @@
     static int           getNumberOfCameras();
 
     static status_t      getCameraInfo(int cameraId,
+                                       bool overrideToPortrait,
                                        /*out*/
                                        struct hardware::CameraInfo* cameraInfo);
 
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 5892f1a..23d90cc 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -692,10 +692,11 @@
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
     }
+
     CameraMetadata rawMetadata;
     int targetSdkVersion = android_get_application_target_sdk_version();
     binder::Status serviceRet = cs->getCameraCharacteristics(String16(cameraIdStr),
-            targetSdkVersion, &rawMetadata);
+            targetSdkVersion, /*overrideToPortrait*/true, &rawMetadata);
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
             case hardware::ICameraService::ERROR_DISCONNECTED:
@@ -747,7 +748,7 @@
     binder::Status serviceRet = cs->connectDevice(
             callbacks, String16(cameraId), String16(""), {},
             hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
-            targetSdkVersion, /*out*/&deviceRemote);
+            targetSdkVersion, /*overrideToPortrait*/true, /*out*/&deviceRemote);
 
     if (!serviceRet.isOk()) {
         ALOGE("%s: connect camera device failed: %s", __FUNCTION__, serviceRet.toString8().string());
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 17ea512..1af5637 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -370,7 +370,7 @@
         // Check metadata binder call
         CameraMetadata metadata;
         res = service->getCameraCharacteristics(cameraId,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(metadata.isEmpty());
 
@@ -386,7 +386,8 @@
         sp<hardware::camera2::ICameraDeviceUser> device;
         res = service->connectDevice(callbacks, cameraId, String16("meeeeeeeee!"),
                 {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&device);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                /*overrideToPortrait*/false, /*out*/&device);
         EXPECT_TRUE(res.isOk()) << res;
         ASSERT_NE(nullptr, device.get());
         device->disconnect();
@@ -429,7 +430,8 @@
             SCOPED_TRACE("openNewDevice");
             binder::Status res = service->connectDevice(callbacks, deviceId, String16("meeeeeeeee!"),
                     {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
-                    /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&device);
+                    /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                    /*overrideToPortrait*/false, /*out*/&device);
             EXPECT_TRUE(res.isOk()) << res;
         }
         auto p = std::make_pair(callbacks, device);
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 76dc38c..f2fa48c 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -74,7 +74,8 @@
         CameraMetadata metadata;
         std::vector<int32_t> tagsNeedingPermission;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                /*overrideToPortrait*/false, &metadata);
         ASSERT_TRUE(rc.isOk());
         EXPECT_FALSE(metadata.isEmpty());
         EXPECT_EQ(metadata.removePermissionEntries(CAMERA_METADATA_INVALID_VENDOR_ID,
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index efd9dae..bdfb84a 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -182,7 +182,8 @@
 
         CameraMetadata metadata;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                &metadata);
         if (!rc.isOk()) {
             // The test is relevant only for cameras with Hal 3.x
             // support.
@@ -209,7 +210,8 @@
         rc = mCameraService->connect(this, cameraId,
                 String16("ZSLTest"), hardware::ICameraService::USE_CALLING_UID,
                 hardware::ICameraService::USE_CALLING_PID,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraDevice);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                /*overrideToPortrait*/false, &cameraDevice);
         EXPECT_TRUE(rc.isOk());
 
         CameraParameters params(cameraDevice->getParameters());
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 2e0b678..d866c18 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -13,6 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#include <algorithm>
+#include <string_view>
+#include <type_traits>
 
 #include <assert.h>
 #include <ctype.h>
@@ -100,7 +103,6 @@
 static const uint32_t kFallbackHeight = 720;
 static const char* kMimeTypeAvc = "video/avc";
 static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";
-static const char* kWinscopeMagicString = "#VV1NSC0PET1ME!#";
 
 // Command-line parameters.
 static bool gVerbose = false;           // chatty on stdout
@@ -354,14 +356,15 @@
 }
 
 /*
- * Writes an unsigned integer byte-by-byte in little endian order regardless
+ * Writes a signed or unsigned integer byte-by-byte in little-endian order regardless
  * of the platform endianness.
  */
-template <typename UINT>
-static void writeValueLE(UINT value, uint8_t* buffer) {
-    for (int i = 0; i < sizeof(UINT); ++i) {
-        buffer[i] = static_cast<uint8_t>(value);
-        value >>= 8;
+template <typename T>
+static void writeValueLE(T value, uint8_t* buffer) {
+    std::remove_const_t<T> temp = value;
+    for (int i = 0; i < sizeof(T); ++i) {
+        buffer[i] = static_cast<std::uint8_t>(temp & 0xff);
+        temp >>= 8;
     }
 }
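
A small usage sketch of the relaxed helper (values illustrative): the byte order is unchanged, and signed types now work because each byte is masked before the shift.

    std::uint8_t buf4[4];
    writeValueLE<std::uint32_t>(0x11223344u, buf4);
    // buf4 = {0x44, 0x33, 0x22, 0x11} regardless of host endianness.

    std::uint8_t buf8[8];
    writeValueLE<std::int64_t>(-2, buf8);
    // buf8 = {0xfe, 0xff, 0xff, ...}, the two's-complement bytes least significant first
    // (assuming the usual arithmetic right shift for negative values).
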
 
@@ -377,16 +380,18 @@
  * - for every frame its presentation time relative to the elapsed realtime clock in microseconds
  *   (as little endian uint64).
  */
-static status_t writeWinscopeMetadata(const Vector<int64_t>& timestamps,
+static status_t writeWinscopeMetadataLegacy(const Vector<int64_t>& timestamps,
         const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
-    ALOGV("Writing metadata");
+    static constexpr auto kWinscopeMagicStringLegacy = "#VV1NSC0PET1ME!#";
+
+    ALOGV("Writing winscope metadata legacy");
     int64_t systemTimeToElapsedTimeOffsetMicros = (android::elapsedRealtimeNano()
         - systemTime(SYSTEM_TIME_MONOTONIC)) / 1000;
     sp<ABuffer> buffer = new ABuffer(timestamps.size() * sizeof(int64_t)
-        + sizeof(uint32_t) + strlen(kWinscopeMagicString));
+        + sizeof(uint32_t) + strlen(kWinscopeMagicStringLegacy));
     uint8_t* pos = buffer->data();
-    strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicString);
-    pos += strlen(kWinscopeMagicString);
+    strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicStringLegacy);
+    pos += strlen(kWinscopeMagicStringLegacy);
     writeValueLE<uint32_t>(timestamps.size(), pos);
     pos += sizeof(uint32_t);
     for (size_t idx = 0; idx < timestamps.size(); ++idx) {
@@ -395,10 +400,79 @@
         pos += sizeof(uint64_t);
     }
     AMediaCodecBufferInfo bufferInfo = {
-        0,
+        0 /* offset */,
         static_cast<int32_t>(buffer->size()),
-        timestamps[0],
-        0
+        timestamps[0] /* presentationTimeUs */,
+        0 /* flags */
+    };
+    return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
+}
+
+/*
+ * Saves metadata needed by Winscope to synchronize the screen recording playback with other traces.
+ *
+ * The metadata (version 2) is written as a binary array with the following format:
+ * - winscope magic string (#VV1NSC0PET1ME2#, 16B).
+ * - the metadata version number (4B little endian).
+ * - Realtime-to-elapsed time offset in nanoseconds (8B little endian).
+ * - the recorded frames count (4B little endian)
+ * - for each recorded frame:
+ *     - System time in elapsed clock timebase in nanoseconds (8B little endian).
+ *
+ *
+ * Metadata version 2 changes
+ *
+ * Use elapsed time for compatibility with other UI traces (most of them):
+ * - Realtime-to-elapsed time offset (instead of realtime-to-monotonic)
+ * - Frame timestamps in elapsed clock timebase (instead of monotonic)
+ */
+static status_t writeWinscopeMetadata(const Vector<std::int64_t>& timestampsMonotonicUs,
+        const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
+    ALOGV("Writing winscope metadata");
+
+    static constexpr auto kWinscopeMagicString = std::string_view {"#VV1NSC0PET1ME2#"};
+    static constexpr std::uint32_t metadataVersion = 2;
+
+    const auto elapsedTimeNs = android::elapsedRealtimeNano();
+    const std::int64_t elapsedToMonotonicTimeOffsetNs =
+            elapsedTimeNs - systemTime(SYSTEM_TIME_MONOTONIC);
+    const std::int64_t realToElapsedTimeOffsetNs =
+            systemTime(SYSTEM_TIME_REALTIME) - elapsedTimeNs;
+    const std::uint32_t framesCount = static_cast<std::uint32_t>(timestampsMonotonicUs.size());
+
+    sp<ABuffer> buffer = new ABuffer(
+        kWinscopeMagicString.size() +
+        sizeof(decltype(metadataVersion)) +
+        sizeof(decltype(realToElapsedTimeOffsetNs)) +
+        sizeof(decltype(framesCount)) +
+        framesCount * sizeof(std::uint64_t)
+    );
+    std::uint8_t* pos = buffer->data();
+
+    std::copy(kWinscopeMagicString.cbegin(), kWinscopeMagicString.cend(), pos);
+    pos += kWinscopeMagicString.size();
+
+    writeValueLE(metadataVersion, pos);
+    pos += sizeof(decltype(metadataVersion));
+
+    writeValueLE(realToElapsedTimeOffsetNs, pos);
+    pos += sizeof(decltype(realToElapsedTimeOffsetNs));
+
+    writeValueLE(framesCount, pos);
+    pos += sizeof(decltype(framesCount));
+
+    for (const auto timestampMonotonicUs : timestampsMonotonicUs) {
+        const auto timestampElapsedNs =
+                elapsedToMonotonicTimeOffsetNs + timestampMonotonicUs * 1000;
+        writeValueLE<std::uint64_t>(timestampElapsedNs, pos);
+        pos += sizeof(std::uint64_t);
+    }
+
+    AMediaCodecBufferInfo bufferInfo = {
+        0 /* offset */,
+        static_cast<std::int32_t>(buffer->size()),
+        timestampsMonotonicUs[0] /* presentationTimeUs */,
+        0 /* flags */
     };
     return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
 }
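
A hedged reader sketch for the version-2 trailer written above; the field widths follow the writer code (16-byte magic, 32-bit version, 64-bit offset, 32-bit frame count, then 64-bit timestamps) and all names here are illustrative:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <string_view>
    #include <vector>

    struct WinscopeTrailerV2 {
        std::uint32_t version = 0;
        std::int64_t realToElapsedTimeOffsetNs = 0;
        std::vector<std::uint64_t> frameTimestampsElapsedNs;
    };

    // Assemble an unsigned value from n little-endian bytes.
    static std::uint64_t readLE(const std::uint8_t* p, size_t n) {
        std::uint64_t v = 0;
        for (size_t i = 0; i < n; ++i) v |= std::uint64_t{p[i]} << (8 * i);
        return v;
    }

    static bool parseWinscopeTrailerV2(const std::uint8_t* data, size_t size,
                                       WinscopeTrailerV2* out) {
        static constexpr std::string_view kMagic{"#VV1NSC0PET1ME2#"};
        if (size < kMagic.size() + 4 + 8 + 4) return false;
        if (memcmp(data, kMagic.data(), kMagic.size()) != 0) return false;
        const std::uint8_t* p = data + kMagic.size();
        const std::uint8_t* end = data + size;
        out->version = static_cast<std::uint32_t>(readLE(p, 4));                  p += 4;
        out->realToElapsedTimeOffsetNs = static_cast<std::int64_t>(readLE(p, 8)); p += 8;
        const auto count = static_cast<std::uint32_t>(readLE(p, 4));              p += 4;
        for (std::uint32_t i = 0; i < count && p + 8 <= end; ++i, p += 8) {
            out->frameTimestampsElapsedNs.push_back(readLE(p, 8));
        }
        return true;
    }
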
@@ -418,11 +492,12 @@
     static int kTimeout = 250000;   // be responsive on signal
     status_t err;
     ssize_t trackIdx = -1;
+    ssize_t metaLegacyTrackIdx = -1;
     ssize_t metaTrackIdx = -1;
     uint32_t debugNumFrames = 0;
     int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
     int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
-    Vector<int64_t> timestamps;
+    Vector<int64_t> timestampsMonotonicUs;
     bool firstFrame = true;
 
     assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));
@@ -520,9 +595,9 @@
                     sp<ABuffer> buffer = new ABuffer(
                             buffers[bufIndex]->data(), buffers[bufIndex]->size());
                     AMediaCodecBufferInfo bufferInfo = {
-                        0,
+                        0 /* offset */,
                         static_cast<int32_t>(buffer->size()),
-                        ptsUsec,
+                        ptsUsec /* presentationTimeUs */,
                         flags
                     };
                     err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
@@ -532,7 +607,7 @@
                         return err;
                     }
                     if (gOutputFormat == FORMAT_MP4) {
-                        timestamps.add(ptsUsec);
+                        timestampsMonotonicUs.add(ptsUsec);
                     }
                 }
                 debugNumFrames++;
@@ -565,6 +640,7 @@
                     if (gOutputFormat == FORMAT_MP4) {
                         AMediaFormat *metaFormat = AMediaFormat_new();
                         AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
+                        metaLegacyTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                         metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                         AMediaFormat_delete(metaFormat);
                     }
@@ -604,10 +680,16 @@
                         systemTime(CLOCK_MONOTONIC) - startWhenNsec));
         fflush(stdout);
     }
-    if (metaTrackIdx >= 0 && !timestamps.isEmpty()) {
-        err = writeWinscopeMetadata(timestamps, metaTrackIdx, muxer);
+    if (metaLegacyTrackIdx >= 0 && metaTrackIdx >= 0 && !timestampsMonotonicUs.isEmpty()) {
+        err = writeWinscopeMetadataLegacy(timestampsMonotonicUs, metaLegacyTrackIdx, muxer);
         if (err != NO_ERROR) {
-            fprintf(stderr, "Failed writing metadata to muxer (err=%d)\n", err);
+            fprintf(stderr, "Failed writing legacy winscope metadata to muxer (err=%d)\n", err);
+            return err;
+        }
+
+        err = writeWinscopeMetadata(timestampsMonotonicUs, metaTrackIdx, muxer);
+        if (err != NO_ERROR) {
+            fprintf(stderr, "Failed writing winscope metadata to muxer (err=%d)\n", err);
             return err;
         }
     }
diff --git a/media/codec2/vndk/include/C2SurfaceSyncObj.h b/media/codec2/vndk/include/C2SurfaceSyncObj.h
index ac87fe4..d858f27 100644
--- a/media/codec2/vndk/include/C2SurfaceSyncObj.h
+++ b/media/codec2/vndk/include/C2SurfaceSyncObj.h
@@ -72,12 +72,13 @@
     /**
      * Notify a buffer is queued. Return whether the upcoming dequeue operation
      * is not blocked. if it's blocked and waitId is non-null, waitId is returned
-     * to be used for waiting.
+     * to be used for waiting. Waiters are notified (woken up) only when
+     * 'notify' is true.
      *
      * \retval false    dequeue operation is blocked now.
      * \retval true     dequeue operation is possible.
      */
-    bool notifyQueuedLocked(uint32_t *waitId = nullptr);
+    bool notifyQueuedLocked(uint32_t *waitId = nullptr, bool notify = true);
 
     /**
      * Notify a buffer is dequeued.
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index e67e42f..f2cd585 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -432,12 +432,16 @@
         if (fence) {
             static constexpr int kFenceWaitTimeMs = 10;
 
+            if (bufferNeedsReallocation) {
+                mBuffers[slot].clear();
+            }
+
             status_t status = fence->wait(kFenceWaitTimeMs);
             if (status == -ETIME) {
                 // fence is not signalled yet.
                 if (syncVar) {
-                    syncVar->lock();
                     (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->lock();
                     dequeueable = syncVar->notifyQueuedLocked(&waitId);
                     syncVar->unlock();
                     if (c2Fence) {
@@ -452,8 +456,8 @@
             if (status != android::NO_ERROR) {
                 ALOGD("buffer fence wait error %d", status);
                 if (syncVar) {
-                    syncVar->lock();
                     (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->lock();
                     syncVar->notifyQueuedLocked();
                     syncVar->unlock();
                     if (c2Fence) {
@@ -502,8 +506,8 @@
             } else if (status != android::NO_ERROR) {
                 slotBuffer.clear();
                 if (syncVar) {
-                    syncVar->lock();
                     (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->lock();
                     syncVar->notifyQueuedLocked();
                     syncVar->unlock();
                     if (c2Fence) {
@@ -550,8 +554,8 @@
             // Block was not created. call requestBuffer# again next time.
             slotBuffer.clear();
             if (syncVar) {
-                syncVar->lock();
                 (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                syncVar->lock();
                 syncVar->notifyQueuedLocked();
                 syncVar->unlock();
                 if (c2Fence) {
@@ -813,11 +817,10 @@
         if (mGeneration == mCurrentGeneration && mBqId == mCurrentBqId && !mOwner.expired()) {
             C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
             if (syncVar) {
+                mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
                 syncVar->lock();
-                if (syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_ACTIVE) {
-                    mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
-                    syncVar->notifyQueuedLocked();
-                }
+                syncVar->notifyQueuedLocked(nullptr,
+                        syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_ACTIVE);
                 syncVar->unlock();
             } else {
                 mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
@@ -826,11 +829,10 @@
     } else if (!mOwner.expired()) {
         C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
         if (syncVar) {
+            mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
             syncVar->lock();
-            if (syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_SWITCHING) {
-                mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
-                syncVar->notifyQueuedLocked();
-            }
+            syncVar->notifyQueuedLocked(nullptr,
+                    syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_SWITCHING);
             syncVar->unlock();
         } else {
             mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index 2115cc3..bf4ca32 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -64,6 +64,11 @@
     }
 
     HandleSyncMem *o = static_cast<HandleSyncMem*>(handle);
+    if (o->size() < sizeof(C2SyncVariables)) {
+        android_errorWriteLog(0x534e4554, "240140929");
+        return nullptr;
+    }
+
     void *ptr = mmap(NULL, o->size(), PROT_READ | PROT_WRITE, MAP_SHARED, o->memFd(), 0);
 
     if (ptr == MAP_FAILED) {
@@ -177,12 +182,14 @@
     return true;
 }
 
-bool C2SyncVariables::notifyQueuedLocked(uint32_t *waitId) {
+bool C2SyncVariables::notifyQueuedLocked(uint32_t *waitId, bool notify) {
     // Note. thundering herds may occur. Edge trigged signalling.
     // But one waiter will guarantee to dequeue. others may wait again.
     // Minimize futex syscall(trap) for the main use case(one waiter case).
     if (mMaxDequeueCount == mCurDequeueCount--) {
-        broadcast();
+        if (notify) {
+            broadcast();
+        }
         return true;
     }
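
A minimal sketch of the caller pattern this enables, mirroring the C2BqBuffer.cpp hunks above (the producer type alias and helper name are assumed): cancel the buffer before taking the futex-backed lock, always run the dequeue-count bookkeeping, and only wake waiters when the status check passes.

    // Illustrative helper; HGraphicBufferProducer is the HIDL producer alias used in C2BqBuffer.cpp.
    static void cancelAndNotify(const sp<HGraphicBufferProducer>& igbp,
                                C2SyncVariables* syncVar, int32_t slot) {
        // Issue the (potentially slow) HIDL call outside the inter-process lock.
        (void)igbp->cancelBuffer(slot, hidl_handle{}).isOk();
        syncVar->lock();
        // The dequeue count is always updated; the futex broadcast only fires when
        // the second argument is true.
        syncVar->notifyQueuedLocked(/*waitId*/ nullptr,
                /*notify*/ syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_ACTIVE);
        syncVar->unlock();
    }
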
 
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 6ad97d1..f290453 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -22,10 +22,10 @@
 
 #include <stdint.h>
 #include <sys/types.h>
-
+#include "IAudioFlinger.h"
 #include <binder/IPCThreadState.h>
 #include <binder/Parcel.h>
-#include "IAudioFlinger.h"
+#include <system/thread_defs.h>
 
 namespace android {
 
@@ -840,7 +840,9 @@
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
 AudioFlingerServerAdapter::AudioFlingerServerAdapter(
-        const sp<AudioFlingerServerAdapter::Delegate>& delegate) : mDelegate(delegate) {}
+        const sp<AudioFlingerServerAdapter::Delegate>& delegate) : mDelegate(delegate) {
+    setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+}
 
 status_t AudioFlingerServerAdapter::onTransact(uint32_t code,
                                                const Parcel& data,
diff --git a/media/libheadtracking/SensorPoseProvider.cpp b/media/libheadtracking/SensorPoseProvider.cpp
index 3dee40a..31d469c 100644
--- a/media/libheadtracking/SensorPoseProvider.cpp
+++ b/media/libheadtracking/SensorPoseProvider.cpp
@@ -42,6 +42,7 @@
 
 // Identifier to use for our event queue on the loop.
 // The number 19 is arbitrary, only useful if using multiple objects on the same looper.
+// Note: the SensorEventQueue's fd could be used here instead of a fixed number.
 constexpr int kIdent = 19;
 
 static inline Looper* ALooper_to_Looper(ALooper* alooper) {
@@ -60,7 +61,8 @@
     EventQueueGuard(const sp<SensorEventQueue>& queue, Looper* looper) : mQueue(queue) {
         mQueue->looper = Looper_to_ALooper(looper);
         mQueue->requestAdditionalInfo = false;
-        looper->addFd(mQueue->getFd(), kIdent, ALOOPER_EVENT_INPUT, nullptr, nullptr);
+        looper->addFd(mQueue->getFd(), kIdent, ALOOPER_EVENT_INPUT,
+                nullptr /* callback */, nullptr /* data */);
     }
 
     ~EventQueueGuard() {
@@ -75,7 +77,7 @@
     [[nodiscard]] SensorEventQueue* get() const { return mQueue.get(); }
 
   private:
-    sp<SensorEventQueue> mQueue;
+    const sp<SensorEventQueue> mQueue;
 };
 
 /**
@@ -95,10 +97,7 @@
         }
     }
 
-    SensorEnableGuard(const SensorEnableGuard&) = delete;
-    SensorEnableGuard& operator=(const SensorEnableGuard&) = delete;
-
-    // Enable moving.
+    // Enable move; the copy-ctor/copy-assignment are implicitly deleted.
     SensorEnableGuard(SensorEnableGuard&& other) : mQueue(other.mQueue), mSensor(other.mSensor) {
         other.mSensor = SensorPoseProvider::INVALID_HANDLE;
     }
@@ -131,7 +130,7 @@
         // Figure out the sensor's data format.
         DataFormat format = getSensorFormat(sensor);
         if (format == DataFormat::kUnknown) {
-            ALOGE("Unknown format for sensor %" PRId32, sensor);
+            ALOGE("%s: Unknown format for sensor %" PRId32, __func__, sensor);
             return false;
         }
 
@@ -145,17 +144,19 @@
 
         // Enable the sensor.
         if (mQueue->enableSensor(sensor, samplingPeriod.count(), 0, 0)) {
-            ALOGE("Failed to enable sensor");
+            ALOGE("%s: Failed to enable sensor %" PRId32, __func__, sensor);
             std::lock_guard lock(mMutex);
             mEnabledSensorsExtra.erase(sensor);
             return false;
         }
 
-        mEnabledSensors.emplace(sensor, SensorEnableGuard(mQueue.get(), sensor));
+        mEnabledSensors.emplace(sensor, SensorEnableGuard(mQueue, sensor));
+        ALOGD("%s: Sensor %" PRId32 " started", __func__, sensor);
         return true;
     }
 
     void stopSensor(int handle) override {
+        ALOGD("%s: Sensor %" PRId32 " stopped", __func__, handle);
         mEnabledSensors.erase(handle);
         std::lock_guard lock(mMutex);
         mEnabledSensorsExtra.erase(handle);
@@ -223,9 +224,9 @@
     Listener* const mListener;
     SensorManager* const mSensorManager;
     std::timed_mutex mMutex;
+    sp<SensorEventQueue> mQueue;
     std::map<int32_t, SensorEnableGuard> mEnabledSensors;
     std::map<int32_t, SensorExtra> mEnabledSensorsExtra GUARDED_BY(mMutex);
-    sp<SensorEventQueue> mQueue;
 
     // We must do some of the initialization operations on the worker thread, because the API relies
     // on the thread-local looper. In addition, as a matter of convenience, we store some of the
@@ -246,7 +247,13 @@
     bool waitInitFinished() { return mInitPromise.get_future().get(); }
 
     void threadFunc() {
-        // Obtain looper.
+        // Name our std::thread to help identification.  As is, canCallJava == false.
+        androidSetThreadName("SensorPoseProvider-looper");
+
+        // Run at the highest non-realtime priority.
+        androidSetThreadPriority(gettid(), PRIORITY_URGENT_AUDIO);
+
+        // The looper is started on the created std::thread.
         mLooper = Looper::prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
 
         // Create event queue.
@@ -263,7 +270,8 @@
         initFinished(true);
 
         while (!mQuit) {
-            int ret = mLooper->pollOnce(-1 /* no timeout */, nullptr, nullptr, nullptr);
+            const int ret = mLooper->pollOnce(-1 /* no timeout */, nullptr /* outFd */,
+                    nullptr /* outEvents */, nullptr /* outData */);
 
             switch (ret) {
                 case ALOOPER_POLL_WAKE:
@@ -276,7 +284,13 @@
                     break;
 
                 default:
-                    ALOGE("Unexpected status out of Looper::pollOnce: %d", ret);
+                    // Besides WAKE and kIdent, there should be no timeouts, callbacks,
+                    // ALOOPER_POLL_ERROR, or other events.
+                    // Exit now to avoid high frequency log spam on error,
+                    // e.g. if the fd becomes invalid (b/31093485).
+                    ALOGE("%s: Unexpected status out of Looper::pollOnce: %d", __func__, ret);
+                    mQuit = true;
+                    continue;
             }
 
             // Process an event.
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index a0bc8ca..6497b58 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -129,6 +129,7 @@
       mRTPCVOExtMap(-1),
       mRTPCVODegrees(0),
       mRTPSockDscp(0),
+      mRTPSockOptEcn(0),
       mRTPSockNetwork(0),
       mLastSeqNo(0),
       mStarted(false),
@@ -910,6 +911,13 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setParamRtpEcn(int32_t ecn) {
+    ALOGV("setParamRtpEcn: %d", ecn);
+
+    mRTPSockOptEcn = ecn;
+    return OK;
+}
+
 status_t StagefrightRecorder::requestIDRFrame() {
     status_t ret = BAD_VALUE;
     if (mVideoEncoderSource != NULL) {
@@ -1091,6 +1099,11 @@
         if (safe_strtoi32(value.string(), &dscp)) {
             return setParamRtpDscp(dscp);
         }
+    } else if (key == "rtp-param-set-socket-ecn") {
+        int32_t targetEcn;
+        if (safe_strtoi32(value.string(), &targetEcn)) {
+            return setParamRtpEcn(targetEcn);
+        }
     } else if (key == "rtp-param-set-socket-network") {
         int64_t networkHandle;
         if (safe_strtoi64(value.string(), &networkHandle)) {
@@ -1272,6 +1285,9 @@
             if (mRTPSockDscp > 0) {
                 meta->setInt32(kKeyRtpDscp, mRTPSockDscp);
             }
+            if (mRTPSockOptEcn > 0) {
+                meta->setInt32(kKeyRtpEcn, mRTPSockOptEcn);
+            }
 
             status = mWriter->start(meta.get());
             break;
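
A hedged usage sketch (the caller-side API and the value are assumptions; the code above only requires ecn > 0): the key travels through the generic "key=value" parameter channel into setParamRtpEcn() and is then forwarded to the RTP writer as kKeyRtpEcn.

    // Illustrative only, assuming the libmedia MediaRecorder client API.
    status_t enableRtpEcn(const sp<MediaRecorder>& recorder) {
        // Parsed by StagefrightRecorder::setParameter() via the new key above.
        return recorder->setParameters(String8("rtp-param-set-socket-ecn=1"));
    }
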
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index d7785da..0801101 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -153,6 +153,7 @@
     int32_t mRTPCVOExtMap;
     int32_t mRTPCVODegrees;
     int32_t mRTPSockDscp;
+    int32_t mRTPSockOptEcn;
     int64_t mRTPSockNetwork;
     uint32_t mLastSeqNo;
 
@@ -247,6 +248,7 @@
     status_t setRTPCVOExtMap(int32_t extmap);
     status_t setRTPCVODegrees(int32_t cvoDegrees);
     status_t setParamRtpDscp(int32_t dscp);
+    status_t setParamRtpEcn(int32_t ecn);
     status_t setSocketNetwork(int64_t networkHandle);
     status_t requestIDRFrame();
     void clipVideoBitRate();
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index 6a17972..fd03150 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -115,7 +115,7 @@
 
         int sockRtp, sockRtcp;
         ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
-                info->mLocalPort, info->mRemotePort, info->mSocketNetwork);
+                info->mLocalPort, info->mRemotePort, info->mSocketNetwork, info->mRtpSockOptEcn);
 
         sp<AMessage> notify = new AMessage('accu', this);
 
@@ -125,6 +125,8 @@
         mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
         mRTPConn->setSelfID(info->mSelfID);
         mRTPConn->setStaticJitterTimeMs(info->mJbTimeMs);
+        mRTPConn->setRtpSockOptEcn(info->mRtpSockOptEcn);
+        mRTPConn->setIsIPv6(info->mLocalIp);
 
         unsigned long PT;
         AString formatDesc, formatParams;
@@ -719,6 +721,8 @@
     } else if (key == "rtp-param-set-socket-network") {
         int64_t networkHandle = atoll(value);
         setSocketNetwork(networkHandle);
+    } else if (key == "rtp-param-set-socket-ecn") {
+        info->mRtpSockOptEcn = atoi(value);
     } else if (key == "rtp-param-jitter-buffer-time") {
         // clamping min at 40, max at 3000
         info->mJbTimeMs = std::min(std::max(40, atoi(value)), 3000);
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
index 7d9bb8f..b2afe86 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/RTPSource.h
@@ -121,6 +121,8 @@
         uint32_t mSelfID;
         /* extmap:<value> for CVO will be set to here */
         int32_t mCVOExtMap;
+        /* Whether ECN is supported */
+        int32_t mRtpSockOptEcn;
 
         /* a copy of TrackInfo in RTSPSource */
         sp<AnotherPacketSource> mSource;
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 9607425..842327d 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -150,7 +150,7 @@
 
     if (camera == 0) {
         mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true);
         if (mCamera == 0) return -EBUSY;
         mCameraFlags &= ~FLAGS_HOT_CAMERA;
     } else {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index b84a056..3044c20 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -354,8 +354,11 @@
 }
 
 //static
-Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
-std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;
+// these are no_destroy to keep them from being destroyed at process exit
+// where some thread calls exit() while other threads are still running.
+// see b/194783918
+[[clang::no_destroy]] Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
+[[clang::no_destroy]] std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;
 
 //static
 void MediaCodec::ResourceManagerServiceProxy::addCookie(void* cookie) {
@@ -807,9 +810,7 @@
       mWidth(0),
       mHeight(0),
       mRotationDegrees(0),
-      mConfigColorTransfer(-1),
-      mHDRStaticInfo(false),
-      mHDR10PlusInfo(false),
+      mHdrInfoFlags(0),
       mDequeueInputTimeoutGeneration(0),
       mDequeueInputReplyID(0),
       mDequeueOutputTimeoutGeneration(0),
@@ -968,29 +969,73 @@
                               mIndexOfFirstFrameWhenLowLatencyOn);
     }
 
-    mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo, mHDRStaticInfo ? 1 : 0);
-    mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo, mHDR10PlusInfo ? 1 : 0);
 #if 0
     // enable for short term, only while debugging
     updateEphemeralMediametrics(mMetricsHandle);
 #endif
 }
 
-void MediaCodec::updateHDRFormatMetric() {
+void MediaCodec::updateHdrMetrics(bool isConfig) {
+    if ((mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) || mMetricsHandle == 0) {
+        return;
+    }
+
+    int32_t colorStandard = -1;
+    if (mOutputFormat->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
+        mediametrics_setInt32(mMetricsHandle,
+                isConfig ? kCodecConfigColorStandard : kCodecParsedColorStandard, colorStandard);
+    }
+    int32_t colorRange = -1;
+    if (mOutputFormat->findInt32(KEY_COLOR_RANGE, &colorRange)) {
+        mediametrics_setInt32(mMetricsHandle,
+                isConfig ? kCodecConfigColorRange : kCodecParsedColorRange, colorRange);
+    }
+    int32_t colorTransfer = -1;
+    if (mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+        mediametrics_setInt32(mMetricsHandle,
+                isConfig ? kCodecConfigColorTransfer : kCodecParsedColorTransfer, colorTransfer);
+    }
+    HDRStaticInfo info;
+    if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)
+            && ColorUtils::isHDRStaticInfoValid(&info)) {
+        mHdrInfoFlags |= kFlagHasHdrStaticInfo;
+    }
+    mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo,
+            (mHdrInfoFlags & kFlagHasHdrStaticInfo) ? 1 : 0);
+    sp<ABuffer> hdr10PlusInfo;
+    if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
+            && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
+        mHdrInfoFlags |= kFlagHasHdr10PlusInfo;
+    }
+    mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo,
+            (mHdrInfoFlags & kFlagHasHdr10PlusInfo) ? 1 : 0);
+
+    // hdr format
+    sp<AMessage> codedFormat = (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
+
+    AString mime;
     int32_t profile = -1;
-    AString mediaType;
-    if (mOutputFormat->findInt32(KEY_PROFILE, &profile)
-            && mOutputFormat->findString("mime", &mediaType)) {
-        hdr_format hdrFormat = getHDRFormat(profile, mConfigColorTransfer, mediaType);
+
+    if (codedFormat->findString("mime", &mime)
+            && codedFormat->findInt32(KEY_PROFILE, &profile)
+            && colorTransfer != -1) {
+        hdr_format hdrFormat = getHdrFormat(mime, profile, colorTransfer);
         mediametrics_setInt32(mMetricsHandle, kCodecHDRFormat, static_cast<int>(hdrFormat));
     }
 }
 
-hdr_format MediaCodec::getHDRFormat(const int32_t profile, const int32_t transfer,
-        const AString &mediaType) {
-    switch (transfer) {
+hdr_format MediaCodec::getHdrFormat(const AString &mime, const int32_t profile,
+        const int32_t colorTransfer) {
+    return (mFlags & kFlagIsEncoder)
+            ? getHdrFormatForEncoder(mime, profile, colorTransfer)
+            : getHdrFormatForDecoder(mime, profile, colorTransfer);
+}
+
+hdr_format MediaCodec::getHdrFormatForEncoder(const AString &mime, const int32_t profile,
+        const int32_t colorTransfer) {
+    switch (colorTransfer) {
         case COLOR_TRANSFER_ST2084:
-            if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
+            if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
                 switch (profile) {
                     case VP9Profile2HDR:
                         return HDR_FORMAT_HDR10;
@@ -999,7 +1044,7 @@
                     default:
                         return HDR_FORMAT_NONE;
                 }
-            } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
+            } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
                 switch (profile) {
                     case AV1ProfileMain10HDR10:
                         return HDR_FORMAT_HDR10;
@@ -1008,7 +1053,7 @@
                     default:
                         return HDR_FORMAT_NONE;
                 }
-            } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
+            } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
                 switch (profile) {
                     case HEVCProfileMain10HDR10:
                         return HDR_FORMAT_HDR10;
@@ -1021,7 +1066,7 @@
                 return HDR_FORMAT_NONE;
             }
         case COLOR_TRANSFER_HLG:
-            if (!mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+            if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
                 return HDR_FORMAT_HLG;
             } else {
                 // TODO: DOLBY format
@@ -1032,6 +1077,47 @@
     }
 }
 
+hdr_format MediaCodec::getHdrFormatForDecoder(const AString &mime, const int32_t profile,
+        const int32_t colorTransfer) {
+    switch (colorTransfer) {
+        case COLOR_TRANSFER_ST2084:
+            if (!(mHdrInfoFlags & kFlagHasHdrStaticInfo) || !profileSupport10Bits(mime, profile)) {
+                return HDR_FORMAT_NONE;
+            }
+            return mHdrInfoFlags & kFlagHasHdr10PlusInfo ? HDR_FORMAT_HDR10PLUS : HDR_FORMAT_HDR10;
+        case COLOR_TRANSFER_HLG:
+            if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+                return HDR_FORMAT_HLG;
+            }
+            // TODO: DOLBY format
+    }
+    return HDR_FORMAT_NONE;
+}
+
+bool MediaCodec::profileSupport10Bits(const AString &mime, const int32_t profile) {
+    if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
+        return true;
+    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
+        switch (profile) {
+            case VP9Profile2:
+            case VP9Profile3:
+            case VP9Profile2HDR:
+            case VP9Profile3HDR:
+            case VP9Profile2HDR10Plus:
+            case VP9Profile3HDR10Plus:
+                return true;
+        }
+    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
+        switch (profile) {
+            case HEVCProfileMain10:
+            case HEVCProfileMain10HDR10:
+            case HEVCProfileMain10HDR10Plus:
+                return true;
+        }
+    }
+    return false;
+}
+
 
 // called to update info being passed back via getMetrics(), which is a
 // unique copy for that call, no concurrent access worries.
@@ -1081,6 +1167,7 @@
 
     // ensure mutex while we do our own work
     Mutex::Autolock _lock(mMetricsLock);
+    mHdrInfoFlags = 0;
     if (mMetricsHandle != 0) {
         if (mediametrics_count(mMetricsHandle) > 0) {
             mediametrics_selfRecord(mMetricsHandle);
@@ -1735,24 +1822,6 @@
                     mediametrics_setInt32(nextMetricsHandle, kCodecPriority, priority);
                 }
             }
-            int32_t colorStandard = -1;
-            if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorStandard, colorStandard);
-            }
-            int32_t colorRange = -1;
-            if (format->findInt32(KEY_COLOR_RANGE, &colorRange)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorRange, colorRange);
-            }
-            int32_t colorTransfer = -1;
-            if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
-                mConfigColorTransfer = colorTransfer;
-                mediametrics_setInt32(mMetricsHandle, kCodecConfigColorTransfer, colorTransfer);
-            }
-            HDRStaticInfo info;
-            if (ColorUtils::getHDRStaticInfoFromFormat(format, &info)
-                    && ColorUtils::isHDRStaticInfoValid(&info)) {
-                mHDRStaticInfo = true;
-            }
         }
 
         // Prevent possible integer overflow in downstream code.
@@ -3439,8 +3508,6 @@
                     CHECK(msg->findMessage("input-format", &mInputFormat));
                     CHECK(msg->findMessage("output-format", &mOutputFormat));
 
-                    updateHDRFormatMetric();
-
                     // limit to confirming the opt-in behavior to minimize any behavioral change
                     if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
                         // signal frame dropping mode in the input format as this may also be
@@ -3483,6 +3550,7 @@
                         if (interestingFormat->findInt32("level", &level)) {
                             mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
                         }
+                        updateHdrMetrics(true /* isConfig */);
                         // bitrate and bitrate mode, encoder only
                         if (mFlags & kFlagIsEncoder) {
                             // encoder specific values
@@ -3522,7 +3590,6 @@
                                 mComponentName.c_str(),
                                 mInputFormat->debugString(4).c_str(),
                                 mOutputFormat->debugString(4).c_str());
-                        updateHDRFormatMetric();
                         CHECK(obj != NULL);
                         response->setObject("input-surface", obj);
                         mHaveInputSurface = true;
@@ -3547,7 +3614,6 @@
                     if (!msg->findInt32("err", &err)) {
                         CHECK(msg->findMessage("input-format", &mInputFormat));
                         CHECK(msg->findMessage("output-format", &mOutputFormat));
-                        updateHDRFormatMetric();
                         mHaveInputSurface = true;
                     } else {
                         response->setInt32("err", err);
@@ -4760,7 +4826,6 @@
         buffer->meta()->setObject("changedKeys", changedKeys);
     }
     mOutputFormat = format;
-    updateHDRFormatMetric();
     mapFormat(mComponentName, format, nullptr, true);
     ALOGV("[%s] output format changed to: %s",
             mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
@@ -4786,9 +4851,6 @@
             HDRStaticInfo info;
             if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
                 setNativeWindowHdrMetadata(mSurface.get(), &info);
-                if (ColorUtils::isHDRStaticInfoValid(&info)) {
-                    mHDRStaticInfo = true;
-                }
             }
         }
 
@@ -4797,7 +4859,6 @@
                 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
             native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
                     hdr10PlusInfo->size(), hdr10PlusInfo->data());
-            mHDR10PlusInfo = true;
         }
 
         if (mime.startsWithIgnoreCase("video/")) {
@@ -4843,21 +4904,8 @@
         }
     }
 
-    if (mMetricsHandle != 0) {
-        int32_t colorStandard = -1;
-        if (format->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorStandard, colorStandard);
-        }
-        int32_t colorRange = -1;
-        if (format->findInt32( KEY_COLOR_RANGE, &colorRange)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorRange, colorRange);
-        }
-        int32_t colorTransfer = -1;
-        if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecParsedColorTransfer, colorTransfer);
-        }
-    }
-}
+    updateHdrMetrics(false /* isConfig */);
+}
 
 void MediaCodec::extractCSD(const sp<AMessage> &format) {
     mCSD.clear();
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index bc6765c..6644e8e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -456,12 +456,19 @@
     int32_t mRotationDegrees;
     int32_t mAllowFrameDroppingBySurface;
 
-    int32_t mConfigColorTransfer;
-    bool mHDRStaticInfo;
-    bool mHDR10PlusInfo;
-    void updateHDRFormatMetric();
-    hdr_format getHDRFormat(const int32_t profile, const int32_t transfer,
-            const AString &mediaType);
+    enum {
+        kFlagHasHdrStaticInfo   = 1,
+        kFlagHasHdr10PlusInfo   = 2,
+    };
+    uint32_t mHdrInfoFlags;
+    void updateHdrMetrics(bool isConfig);
+    hdr_format getHdrFormat(const AString &mime, const int32_t profile,
+            const int32_t colorTransfer);
+    hdr_format getHdrFormatForEncoder(const AString &mime, const int32_t profile,
+            const int32_t colorTransfer);
+    hdr_format getHdrFormatForDecoder(const AString &mime, const int32_t profile,
+            const int32_t colorTransfer);
+    bool profileSupport10Bits(const AString &mime, const int32_t profile);
 
     // initial create parameters
     AString mInitName;
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 88c1f3f..31faafb 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -267,6 +267,7 @@
     kKeyRtpExtMap        = 'extm', // int32_t, rtp extension ID for cvo on RTP protocol.
     kKeyRtpCvoDegrees    = 'cvod', // int32_t, rtp cvo degrees as per 3GPP 26.114.
     kKeyRtpDscp          = 'dscp', // int32_t, DSCP(Differentiated services codepoint) of RFC 2474.
+    kKeyRtpEcn           = 'sEcn', // int32_t, ECN (Explicit Congestion Notification) of RFC 3168
     kKeySocketNetwork    = 'sNet', // int64_t, socket will be bound to network handle.
 
     // Slow-motion markers
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 88f7be7..100c0cd 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -46,7 +46,6 @@
       mFirstIFrameProvided(false),
       mLastCvo(-1),
       mLastIFrameProvidedAtMs(0),
-      mLastRtpTimeJitterDataUs(0),
       mWidth(0),
       mHeight(0) {
 }
@@ -123,20 +122,11 @@
     }
 
     sp<ABuffer> buffer = *queue->begin();
+    uint32_t seqNum = (uint32_t)buffer->int32Data();
     buffer->meta()->setObject("source", source);
 
-    /**
-     * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
-     * But that is not useful as an ingredient of buffering time.
-     * Instead, we calculates the time only for all 'NAL units'.
-     */
     int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
     int64_t nowTimeUs = ALooper::GetNowUs();
-    if (rtpTime != mLastRtpTimeJitterDataUs) {
-        source->putBaseJitterData(rtpTime, nowTimeUs);
-        mLastRtpTimeJitterDataUs = rtpTime;
-    }
-    source->putInterArrivalJitterData(rtpTime, nowTimeUs);
 
     const int64_t startTimeMs = source->mSysAnchorTime / 1000;
     const int64_t nowTimeMs = nowTimeUs / 1000;
@@ -168,7 +158,7 @@
     const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
     const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
     /* Fundamental jitter time */
-    const int32_t jitterTimeMs = baseJbTimeMs;
+    const int32_t jitterTimeMs = baseJbTimeMs + dynamicJbTimeMs;
     const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
 
     // Till (T), this assembler waits unconditionally to collect current NAL unit
@@ -177,7 +167,7 @@
     bool isExpired = (diffTimeRtp >= 0);                    // It's expired if T is passed away
 
     // From (T), this assembler tries to complete the NAL till (T + try)
-    int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+    int32_t tryJbTimeMs = dynamicJbTimeMs;
     int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
     bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
 
@@ -208,10 +198,10 @@
         String8 info;
         info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
                     "Seq# %d \t ExpSeq# %d \t"
-                    "JitterMs %d + (%d + %d * %.3f)",
+                    "JitterMs [%d + (~%d~)] + %d * %.3f",
                     (long long)diffTimeRtp, (long long)totalDiffTimeMs,
-                    buffer->int32Data(), mNextExpectedSeqNo,
-                    jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+                    seqNum, mNextExpectedSeqNo,
+                    baseJbTimeMs, dynamicJbTimeMs, tryJbTimeMs, JITTER_MULTIPLE);
         if (isSecondLineBroken) {
             ALOGE("%s", info.string());
             printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
@@ -223,6 +213,9 @@
     }
 
     if (mNextExpectedSeqNoValid) {
+        if (mNextExpectedSeqNo > seqNum) {
+            ALOGE("Reversed exp seq# %d \t current head %d", mNextExpectedSeqNo, seqNum);
+        }
         mNextExpectedSeqNo = pickStartSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
         int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
 
@@ -241,10 +234,10 @@
 
     if (!mNextExpectedSeqNoValid) {
         mNextExpectedSeqNoValid = true;
-        mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
-    } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
-        ALOGV("Not the sequence number I expected");
-
+        mNextExpectedSeqNo = seqNum;
+    } else if (seqNum != mNextExpectedSeqNo) {
+        ALOGV("Not the sequence number(%d) I expected. Actual seq# is %d",
+                mNextExpectedSeqNo, seqNum);
         return WRONG_SEQUENCE_NUMBER;
     }
 
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index 72dd981..7b5c24a 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -53,7 +53,6 @@
       mFirstIFrameProvided(false),
       mLastCvo(-1),
       mLastIFrameProvidedAtMs(0),
-      mLastRtpTimeJitterDataUs(0),
       mWidth(0),
       mHeight(0) {
 
@@ -133,20 +132,11 @@
     }
 
     sp<ABuffer> buffer = *queue->begin();
+    uint32_t seqNum = (uint32_t)buffer->int32Data();
     buffer->meta()->setObject("source", source);
 
-    /**
-     * RFC3550 calculates the interarrival jitter time for 'ALL packets'.
-     * But that is not useful as an ingredient of buffering time.
-     * Instead, we calculates the time only for all 'NAL units'.
-     */
     int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
     int64_t nowTimeUs = ALooper::GetNowUs();
-    if (rtpTime != mLastRtpTimeJitterDataUs) {
-        source->putBaseJitterData(rtpTime, nowTimeUs);
-        mLastRtpTimeJitterDataUs = rtpTime;
-    }
-    source->putInterArrivalJitterData(rtpTime, nowTimeUs);
 
     const int64_t startTimeMs = source->mSysAnchorTime / 1000;
     const int64_t nowTimeMs = nowTimeUs / 1000;
@@ -178,7 +168,7 @@
     const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
     const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
     /* Fundamental jitter time */
-    const int32_t jitterTimeMs = baseJbTimeMs;
+    const int32_t jitterTimeMs = baseJbTimeMs + dynamicJbTimeMs;
     const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
 
     // Till (T), this assembler waits unconditionally to collect current NAL unit
@@ -187,7 +177,7 @@
     bool isExpired = (diffTimeRtp >= 0);                    // It's expired if T is passed away
 
     // From (T), this assembler tries to complete the NAL till (T + try)
-    int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+    int32_t tryJbTimeMs = dynamicJbTimeMs;
     int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
     bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
 
@@ -218,10 +208,10 @@
         String8 info;
         info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
                     "Seq# %d \t ExpSeq# %d \t"
-                    "JitterMs %d + (%d + %d * %.3f)",
+                    "JitterMs [%d + (~%d~)] + %d * %.3f",
                     (long long)diffTimeRtp, (long long)totalDiffTimeMs,
-                    buffer->int32Data(), mNextExpectedSeqNo,
-                    jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+                    seqNum, mNextExpectedSeqNo,
+                    baseJbTimeMs, dynamicJbTimeMs, tryJbTimeMs, JITTER_MULTIPLE);
         if (isSecondLineBroken) {
             ALOGE("%s", info.string());
             printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
@@ -251,10 +241,10 @@
 
     if (!mNextExpectedSeqNoValid) {
         mNextExpectedSeqNoValid = true;
-        mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
-    } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
-        ALOGV("Not the sequence number I expected");
-
+        mNextExpectedSeqNo = seqNum;
+    } else if (seqNum != mNextExpectedSeqNo) {
+        ALOGV("Not the sequence number(%d) I expected. Actual seq# is %d",
+                mNextExpectedSeqNo, seqNum);
         return WRONG_SEQUENCE_NUMBER;
     }
 
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index a61f48f..165c336 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -16,6 +16,12 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ARTPConnection"
+#define INET_ECN_NOT_ECT    0x00    /* Not ECN-capable transport */
+#define INET_ECN_ECT_1      0x01    /* ECN-capable transport, ECT(1) */
+#define INET_ECN_ECT_0      0x02    /* ECN-capable transport, ECT(0) */
+#define INET_ECN_CE         0x03    /* Congestion experienced */
+#define INET_ECN_MASK       0x03    /* Mask of ECN bits */
+
 #include <utils/Log.h>
 
 #include <media/stagefright/rtsp/ARTPAssembler.h>
@@ -56,6 +62,7 @@
 
 // static
 const int64_t ARTPConnection::kSelectTimeoutUs = 1000LL;
+const int64_t ARTPConnection::kMinOneSecondNotifyDelayUs = 100000ll;
 
 struct ARTPConnection::StreamInfo {
     bool isIPv6;
@@ -84,7 +91,10 @@
       mPollEventPending(false),
       mLastReceiverReportTimeUs(-1),
       mLastBitrateReportTimeUs(-1),
+      mLastCongestionNotifyTimeUs(-1),
       mTargetBitrate(-1),
+      mRtpSockOptEcn(0),
+      mIsIPv6(false),
       mStaticJitterTimeMs(kStaticJitterTimeMs) {
 }
 
@@ -175,7 +185,7 @@
 // static
 void ARTPConnection::MakeRTPSocketPair(
         int *rtpSocket, int *rtcpSocket, const char *localIp, const char *remoteIp,
-        unsigned localPort, unsigned remotePort, int64_t socketNetwork) {
+        unsigned localPort, unsigned remotePort, int64_t socketNetwork, int32_t sockOptEcn) {
     bool isIPv6 = false;
     if (strchr(localIp, ':') != NULL)
         isIPv6 = true;
@@ -204,6 +214,24 @@
         }
     }
 
+    if (sockOptEcn != 0) {
+        int sockOptForTOS = 1;
+        if (setsockopt(*rtpSocket, isIPv6 ? IPPROTO_IPV6 : IPPROTO_IP,
+               isIPv6 ? IPV6_RECVTCLASS : IP_RECVTOS,
+               (int *)&sockOptForTOS, sizeof(sockOptForTOS)) < 0) {
+            ALOGE("failed to set recv sockopt TOS on rtpsock(%d). err=%s", *rtpSocket,
+                strerror(errno));
+        } else {
+            ALOGD("successfully set recv sockopt TOS on rtpsock(%d)", *rtpSocket);
+            int result = setsockopt(*rtcpSocket, isIPv6 ? IPPROTO_IPV6 : IPPROTO_IP,
+                isIPv6 ? IPV6_RECVTCLASS : IP_RECVTOS,
+                (int *)&sockOptForTOS, sizeof(sockOptForTOS));
+            if (result >= 0) {
+                ALOGD("successfully set recv sockopt TOS on rtcpsock(%d).", *rtcpSocket);
+            }
+        }
+    }
+
     bumpSocketBufferSize(*rtcpSocket);
 
     struct sockaddr *addr;
@@ -593,32 +621,25 @@
 
     sp<ABuffer> buffer = new ABuffer(65536);
 
-    struct sockaddr *pRemoteRTCPAddr;
-    int sizeSockSt;
-    if (s->isIPv6) {
-        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr6;
-        sizeSockSt = sizeof(struct sockaddr_in6);
-    } else {
-        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr;
-        sizeSockSt = sizeof(struct sockaddr_in);
-    }
-    socklen_t remoteAddrLen =
-        (!receiveRTP && s->mNumRTCPPacketsReceived == 0)
-            ? sizeSockSt : 0;
+    struct msghdr sMsg = {};
+    struct iovec sIov[1] = {};
 
-    if (mFlags & kViLTEConnection) {
-        remoteAddrLen = 0;
-    }
+    sIov[0].iov_base = (char *) buffer->data();
+    sIov[0].iov_len = buffer->capacity();
+
+    sMsg.msg_iov = sIov;
+    sMsg.msg_iovlen = 1;
+
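+    // Reserve ancillary-data space so the kernel can deliver the one-byte TOS/TCLASS control message.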
+    int cMsgSize = sizeof(struct cmsghdr) + sizeof(uint8_t);
+    char buf[CMSG_SPACE(cMsgSize)];
+    sMsg.msg_control = buf;
+    sMsg.msg_controllen = sizeof(buf);
+    sMsg.msg_flags = 0;
 
     ssize_t nbytes;
     do {
-        nbytes = recvfrom(
-            receiveRTP ? s->mRTPSocket : s->mRTCPSocket,
-            buffer->data(),
-            buffer->capacity(),
-            0,
-            remoteAddrLen > 0 ? pRemoteRTCPAddr : NULL,
-            remoteAddrLen > 0 ? &remoteAddrLen : NULL);
+        // Use recvmsg() so the TOS byte of the incoming packet is delivered as ancillary data
+        nbytes = recvmsg(receiveRTP ? s->mRTPSocket : s->mRTCPSocket, &sMsg, 0);
         mCumulativeBytes += nbytes;
     } while (nbytes < 0 && errno == EINTR);
 
@@ -633,6 +654,10 @@
         }
     }
 
+    if (nbytes > 0) {
+        handleIpHeadersIfReceived(s, sMsg);
+    }
+
     buffer->setRange(0, nbytes);
 
     // ALOGI("received %d bytes.", buffer->size());
@@ -647,13 +672,68 @@
     return err;
 }
 
+/* Checks whether a TOS byte is present in the received IP packet.
+ * If it is present and the CE bit is set, notifies the upper layer
+ * about congestion.
+ */
+void ARTPConnection::handleIpHeadersIfReceived(StreamInfo *s, struct msghdr sMsg) {
+    struct cmsghdr *cMsg;
+    cMsg = CMSG_FIRSTHDR(&sMsg);
+
+    if (cMsg == NULL) {
+        ALOGV("cmsg is null");
+    }
+
+    for (; cMsg != NULL; cMsg = CMSG_NXTHDR(&sMsg, cMsg)) {
+        bool isTOSHeader = ((cMsg->cmsg_level == (mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP))
+                              && (cMsg->cmsg_type == (mIsIPv6 ? IPV6_TCLASS : IP_TOS))
+                              && (cMsg->cmsg_len));
+        if (isTOSHeader) {
+            uint8_t receivedTOS;
+            receivedTOS = *((uint8_t *) CMSG_DATA(cMsg));
+            // check whether the CE bit is set
+            bool isCEBitMarked = ((receivedTOS & INET_ECN_MASK) == INET_ECN_CE);
+
+            ALOGV("receivedTos(value -> %d)", receivedTOS);
+
+            if (isCEBitMarked) {
+                ALOGD("receivedTos(value -> %d), is ECN CE marked = %d",
+                    receivedTOS, isCEBitMarked);
+                notifyCongestionToUpperLayerIfNeeded(s);
+            }
+            break;
+        }
+    }
+}
+
+/* Notifies the upper layer about congestion detected during a video call. */
+void ARTPConnection::notifyCongestionToUpperLayerIfNeeded(StreamInfo *s) {
+    int64_t nowUs = ALooper::GetNowUs();
+
+    if (mLastCongestionNotifyTimeUs <= 0) {
+        mLastCongestionNotifyTimeUs = nowUs;
+    }
+
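+    // Rate-limit congestion notifications: only mark an update once the minimum delay has elapsed.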
+    bool isNeedToUpdate = (mLastCongestionNotifyTimeUs + kMinOneSecondNotifyDelayUs <= nowUs);
+    ALOGD("ECN info set by upper layer=%d, isNeedToUpdate=%d", mRtpSockOptEcn, isNeedToUpdate);
+
+    if ((mRtpSockOptEcn != 0) && (isNeedToUpdate)) {
+        sp<AMessage> notify = s->mNotifyMsg->dup();
+        notify->setInt32("rtcp-event", 1);
+        notify->setInt32("payload-type", ARTPSource::RTP_QUALITY_CD);
+        notify->post();
+        mLastCongestionNotifyTimeUs = nowUs;
+        ALOGD("Congestion detected in n/w, Notify upper layer");
+    }
+}
+
 ssize_t ARTPConnection::send(const StreamInfo *info, const sp<ABuffer> buffer) {
         struct sockaddr* pRemoteRTCPAddr;
         int sizeSockSt;
 
         /* It seems this isIPv6 variable is useless.
          * We should remove it to prevent confusion */
-        if (info->isIPv6) {
+        if (mIsIPv6) {
             pRemoteRTCPAddr = (struct sockaddr *)&info->mRemoteRTCPAddr6;
             sizeSockSt = sizeof(struct sockaddr_in6);
         } else {
@@ -1215,12 +1295,20 @@
     mTargetBitrate = targetBitrate;
 }
 
+void ARTPConnection::setRtpSockOptEcn(int32_t sockOptEcn) {
+    mRtpSockOptEcn = sockOptEcn;
+}
+
+void ARTPConnection::setIsIPv6(const char *localIp) {
+    mIsIPv6 = (strchr(localIp, ':') != nullptr);
+}
+
 void ARTPConnection::checkRxBitrate(int64_t nowUs) {
     if (mLastBitrateReportTimeUs <= 0) {
         mCumulativeBytes = 0;
         mLastBitrateReportTimeUs = nowUs;
     }
-    else if (mLastEarlyNotifyTimeUs + 100000ll <= nowUs) {
+    else if (mLastEarlyNotifyTimeUs + kMinOneSecondNotifyDelayUs <= nowUs) {
         int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
         int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
         mLastEarlyNotifyTimeUs = nowUs;
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 717d8af..c5b0a1e 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -264,12 +264,12 @@
 
 bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
     int64_t nowUs = ALooper::GetNowUs();
+    int64_t rtpTime = 0;
     uint32_t seqNum = (uint32_t)buffer->int32Data();
-    int32_t ssrc = 0, rtpTime = 0;
+    int32_t ssrc = 0;
 
     buffer->meta()->findInt32("ssrc", &ssrc);
     CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
-    mLatestRtpTime = rtpTime;
 
     if (mNumBuffersReceived++ == 0 && mFirstSysTime == 0) {
         mFirstSysTime = nowUs;
@@ -277,7 +277,7 @@
         mLastSysAnchorTimeUpdatedUs = nowUs;
         mHighestSeqNumber = seqNum;
         mBaseSeqNumber = seqNum;
-        mFirstRtpTime = rtpTime;
+        mFirstRtpTime = (uint32_t)rtpTime;
         mFirstSsrc = ssrc;
         ALOGD("first-rtp arrived: first-rtp-time=%u, sys-time=%lld, seq-num=%u, ssrc=%d",
                 mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber, mFirstSsrc);
@@ -352,6 +352,18 @@
 
     mQueue.insert(it, buffer);
 
+    /**
+     * RFC 3550 calculates the interarrival jitter time for ALL packets.
+     * We calculate another jitter value using only the 'head NAL units'.
+     */
+    ALOGV("<======== Insert %d", seqNum);
+    rtpTime = mAssembler->findRTPTime(mFirstRtpTime, buffer);
+    if (rtpTime != mLatestRtpTime) {
+        mJitterCalc->putBaseData(rtpTime, nowUs);
+    }
+    mJitterCalc->putInterArrivalData(rtpTime, nowUs);
+    mLatestRtpTime = rtpTime;
+
     return true;
 }
 
@@ -680,14 +692,6 @@
     mStaticJbTimeMs = jbTimeMs;
 }
 
-void ARTPSource::putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime) {
-    mJitterCalc->putBaseData(timeStamp, arrivalTime);
-}
-
-void ARTPSource::putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime) {
-    mJitterCalc->putInterArrivalData(timeStamp, arrivalTime);
-}
-
 void ARTPSource::setJbTimer(const sp<AMessage> timer) {
     mJbTimer = timer;
 }
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 8990f0c..41f2d67 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -255,9 +255,34 @@
     if (params->findInt32(kKeyRtpCvoDegrees, &rtpCVODegrees))
         mRTPCVODegrees = rtpCVODegrees;
 
+    bool needToSetSockOpt = false;
     int32_t dscp = 0;
-    if (params->findInt32(kKeyRtpDscp, &dscp))
-        updateSocketDscp(dscp);
+    if (params->findInt32(kKeyRtpDscp, &dscp)) {
+        mRtpLayer3Dscp = dscp << 2;
+        needToSetSockOpt = true;
+    }
+
+    int32_t ecn = 0;
+    if (params->findInt32(kKeyRtpEcn, &ecn)) {
+        /*
+         * @ecn: possible values for the ECN field.
+         *  +-----+-----+
+         *  | ECN FIELD |
+         *  +-----+-----+
+         *    ECT   CE         [Obsolete] RFC 2481 names for the ECN bits.
+         *     0     0         Not-ECT
+         *     0     1         ECT (ECN-Capable Transport) (1)
+         *     1     0         ECT (ECN-Capable Transport) (0)
+         *     1     1         CE (Congestion Experienced)
+         *
+         */
+        mRtpSockOptEcn = ecn;
+        needToSetSockOpt = true;
+    }
+
+    if (needToSetSockOpt) {
+        updateSocketOpt();
+    }
 
     int64_t sockNetwork = 0;
     if (params->findInt64(kKeySocketNetwork, &sockNetwork))
@@ -1438,18 +1463,29 @@
     mPayloadType = payloadType;
 }
 
-void ARTPWriter::updateSocketDscp(int32_t dscp) {
-    mRtpLayer3Dscp = dscp << 2;
+/*
+ * Sets the IP TOS / IPv6 traffic-class socket option carrying the DSCP and ECN bits.
+ */
+void ARTPWriter::updateSocketOpt() {
+    /*
+     * 0     1     2     3     4     5     6     7
+     * +-----+-----+-----+-----+-----+-----+-----+-----+
+     * |          DS FIELD, DSCP           | ECN FIELD |
+     * +-----+-----+-----+-----+-----+-----+-----+-----+
+     */
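+    // DSCP occupies the upper six bits of the TOS byte and ECN the lower two, so the values combine without overlap.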
+    int sockOpt = mRtpLayer3Dscp ^ mRtpSockOptEcn;
+    ALOGD("Update socket opt with sockopt=%d, mRtpLayer3Dscp=%d, mRtpSockOptEcn=%d",
+                sockOpt, mRtpLayer3Dscp, mRtpSockOptEcn);
 
-    /* mRtpLayer3Dscp will be mapped to WMM(Wifi) as per operator's requirement */
-    if (setsockopt(mRTPSocket, IPPROTO_IP, IP_TOS,
-                (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp)) < 0) {
-        ALOGE("failed to set dscp on rtpsock. err=%s", strerror(errno));
+    /* sockOpt carries the combined DSCP and ECN bits written into the IP header */
+    if (setsockopt(mRTPSocket, mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP, mIsIPv6 ? IPV6_TCLASS : IP_TOS,
+                (int *)&sockOpt, sizeof(sockOpt)) < 0) {
+        ALOGE("failed to set sockopt on rtpsock. err=%s", strerror(errno));
     } else {
-        ALOGD("successfully set dscp on rtpsock. opt=%d", mRtpLayer3Dscp);
-        setsockopt(mRTCPSocket, IPPROTO_IP, IP_TOS,
-                (int *)&mRtpLayer3Dscp, sizeof(mRtpLayer3Dscp));
-        ALOGD("successfully set dscp on rtcpsock. opt=%d", mRtpLayer3Dscp);
+        ALOGD("successfully set sockopt. opt=%d", sockOpt);
+        setsockopt(mRTCPSocket, mIsIPv6 ? IPPROTO_IPV6 : IPPROTO_IP, mIsIPv6 ? IPV6_TCLASS : IP_TOS,
+                (int *)&sockOpt, sizeof(sockOpt));
+        ALOGD("successfully set sockopt rtcpsock. opt=%d", sockOpt);
     }
 }
 
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
index 2f8b8ba..70ce388 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AAVCAssembler.h
@@ -50,7 +50,6 @@
     bool mFirstIFrameProvided;
     int32_t mLastCvo;
     uint64_t mLastIFrameProvidedAtMs;
-    int64_t mLastRtpTimeJitterDataUs;
     int32_t mWidth;
     int32_t mHeight;
     List<sp<ABuffer> > mNALUnits;
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
index 9575d8c..ed3f1ae 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/AHEVCAssembler.h
@@ -51,7 +51,6 @@
     bool mFirstIFrameProvided;
     int32_t mLastCvo;
     uint64_t mLastIFrameProvidedAtMs;
-    int64_t mLastRtpTimeJitterDataUs;
     int32_t mWidth;
     int32_t mHeight;
     List<sp<ABuffer> > mNALUnits;
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
index 39161b6..8f87642 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
@@ -44,6 +44,13 @@
     virtual void onByeReceived() = 0;
     virtual bool initCheck() { return true; }
 
+    // Utility functions
+    inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
+    inline int64_t MsToRtp(int64_t ms, int64_t clockRate);
+    inline int64_t RtpToMs(int64_t rtp, int64_t clockRate);
+    inline void printNowTimeMs(int64_t start, int64_t now, int64_t play);
+    inline void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
+
 protected:
     virtual AssemblyStatus assembleMore(const sp<ARTPSource> &source) = 0;
     virtual void packetLost() = 0;
@@ -64,13 +71,6 @@
     bool mShowQueue;
     int32_t mShowQueueCnt;
 
-    // Utility functions
-    inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
-    inline int64_t MsToRtp(int64_t ms, int64_t clockRate);
-    inline int64_t RtpToMs(int64_t rtp, int64_t clockRate);
-    inline void printNowTimeMs(int64_t start, int64_t now, int64_t play);
-    inline void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
-
 private:
     int64_t mFirstFailureTimeUs;
 
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
index 73d2866..250de71 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
@@ -20,6 +20,7 @@
 
 #include <media/stagefright/foundation/AHandler.h>
 #include <utils/List.h>
+#include <sys/socket.h>
 
 namespace android {
 
@@ -48,6 +49,8 @@
     void setSelfID(const uint32_t selfID);
     void setStaticJitterTimeMs(const uint32_t jbTimeMs);
     void setTargetBitrate(int32_t targetBitrate);
+    void setRtpSockOptEcn(int32_t sockOptEcn);
+    void setIsIPv6(const char *localIp);
 
     // Creates a pair of UDP datagram sockets bound to adjacent ports
     // (the rtpSocket is bound to an even port, the rtcpSocket to the
@@ -60,7 +63,8 @@
     static void MakeRTPSocketPair(
             int *rtpSocket, int *rtcpSocket,
             const char *localIp, const char *remoteIp,
-            unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0);
+            unsigned localPort, unsigned remotePort, int64_t socketNetwork = 0,
+            int32_t sockOptEcn = 0);
 
 protected:
     virtual ~ARTPConnection();
@@ -77,6 +81,7 @@
     };
 
     static const int64_t kSelectTimeoutUs;
+    static const int64_t kMinOneSecondNotifyDelayUs;
 
     uint32_t mFlags;
 
@@ -87,9 +92,12 @@
     int64_t mLastReceiverReportTimeUs;
     int64_t mLastBitrateReportTimeUs;
     int64_t mLastEarlyNotifyTimeUs;
+    int64_t mLastCongestionNotifyTimeUs;
 
     int32_t mSelfID;
     int32_t mTargetBitrate;
+    int32_t mRtpSockOptEcn;
+    bool mIsIPv6;
 
     uint32_t mStaticJitterTimeMs;
 
@@ -103,6 +111,8 @@
     void onInjectPacket(const sp<AMessage> &msg);
     void onSendReceiverReports();
     void checkRxBitrate(int64_t nowUs);
+    void notifyCongestionToUpperLayerIfNeeded(StreamInfo *s);
+    void handleIpHeadersIfReceived(StreamInfo *s, struct msghdr sMsg);
 
     status_t receive(StreamInfo *info, bool receiveRTP);
     ssize_t send(const StreamInfo *info, const sp<ABuffer> buffer);
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
index e9b4942..7d1faf2 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
@@ -50,6 +50,7 @@
         RTCP_FIRST_PACKET = 101,
         RTP_QUALITY = 102,
         RTP_QUALITY_EMC = 103,
+        RTP_QUALITY_CD = 104,
         RTCP_SR = 200,
         RTCP_RR = 201,
         RTCP_TSFB = 205,
@@ -81,8 +82,6 @@
     int32_t getBaseJitterTimeMs();
     int32_t getInterArrivalJitterTimeMs();
     void setStaticJitterTimeMs(const uint32_t jbTimeMs);
-    void putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime);
-    void putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime);
     void setJbTimer(const sp<AMessage> timer);
     void setJbAlarmTime(int64_t nowTimeUs, int64_t alarmAfterUs);
 
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
index 2982cf6..ecd29d0 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPWriter.h
@@ -50,7 +50,7 @@
     virtual status_t pause();
     void updateCVODegrees(int32_t cvoDegrees);
     void updatePayloadType(int32_t payloadType);
-    void updateSocketDscp(int32_t dscp);
+    void updateSocketOpt();
     void updateSocketNetwork(int64_t socketNetwork);
     uint32_t getSequenceNum();
     virtual uint64_t getAccumulativeBytes() override;
@@ -98,6 +98,7 @@
     struct sockaddr_in6 mRTPAddr6;
     struct sockaddr_in6 mRTCPAddr6;
     int32_t mRtpLayer3Dscp;
+    int32_t mRtpSockOptEcn;
     net_handle_t mRTPSockNetwork;
 
     AString mProfileLevel;
diff --git a/media/utils/SchedulingPolicyService.cpp b/media/utils/SchedulingPolicyService.cpp
index ad38862..6e515ff 100644
--- a/media/utils/SchedulingPolicyService.cpp
+++ b/media/utils/SchedulingPolicyService.cpp
@@ -18,6 +18,7 @@
 //#define LOG_NDEBUG 0
 
 #include <binder/IServiceManager.h>
+#include <cutils/properties.h>
 #include <utils/Mutex.h>
 #include "ISchedulingPolicyService.h"
 #include "mediautils/SchedulingPolicyService.h"
@@ -86,4 +87,25 @@
     return ret;
 }
 
+int requestSpatializerPriority(pid_t pid, pid_t tid) {
+    if (pid == -1 || tid == -1) return BAD_VALUE;
+
+    // update priority to RT if specified.
+    constexpr int32_t kRTPriorityMin = 1;
+    constexpr int32_t kRTPriorityMax = 3;
+    const int32_t priorityBoost =
+            property_get_int32("audio.spatializer.priority", kRTPriorityMin);
+    if (priorityBoost >= kRTPriorityMin && priorityBoost <= kRTPriorityMax) {
+        const status_t status = requestPriority(
+                pid, tid, priorityBoost, false /* isForApp */, true /*asynchronous*/);
+        if (status != OK) {
+            ALOGW("%s: Cannot request spatializer priority boost %d, status:%d",
+                    __func__, priorityBoost, status);
+            return status < 0 ? status : UNKNOWN_ERROR;
+        }
+        return priorityBoost;
+    }
+    return 0;  // no boost requested
+}
+
 }   // namespace android
diff --git a/media/utils/include/mediautils/SchedulingPolicyService.h b/media/utils/include/mediautils/SchedulingPolicyService.h
index 546cec5..af1fcd2 100644
--- a/media/utils/include/mediautils/SchedulingPolicyService.h
+++ b/media/utils/include/mediautils/SchedulingPolicyService.h
@@ -23,7 +23,7 @@
 
 class IBinder;
 // Request elevated priority for thread tid, whose thread group leader must be pid.
-// The priority parameter is currently restricted to either 1 or 2.
+// The priority parameter is currently restricted to the range 1 to 3.
 // The asynchronous parameter should be 'true' to return immediately,
 // after the request is enqueued but not necessarily executed.
 // The default value 'false' means to return after request has been enqueued and executed.
@@ -37,6 +37,12 @@
 // 'client' is ignored in this case.
 int requestCpusetBoost(bool enable, const sp<IBinder> &client);
 
+// Audio: Request Spatializer RT priority for thread tid, whose thread group leader must be pid.
+// Returns a positive value (the RT priority used) on success,
+//         zero if no RT priority was selected,
+//         or a negative status code if the RT priority could not be set.
+int requestSpatializerPriority(pid_t pid, pid_t tid);
+
 }   // namespace android
 
 #endif  // _ANDROID_SCHEDULING_POLICY_SERVICE_H
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index f94106f..d2363d8 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -3733,6 +3733,12 @@
 
         using namespace std::chrono_literals;
         auto inChannelMask = audio_channel_mask_out_to_in(track->channelMask());
+        if (inChannelMask == AUDIO_CHANNEL_INVALID) {
+            // The downstream PatchTrack has the proper output channel mask,
+            // so if there is no input channel mask equivalent, we can just
+            // use an index mask here to create the PatchRecord.
+            inChannelMask = audio_channel_mask_out_to_in_index_mask(track->channelMask());
+        }
         sp patchRecord = new RecordThread::PatchRecord(nullptr /* thread */,
                                                        track->sampleRate(),
                                                        inChannelMask,
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 24fe6f6..9ebff0b 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -1730,6 +1730,7 @@
     mNotifyFramesProcessed(notifyFramesProcessed)
 {
     ALOGV("constructor %p client %p", this, client.get());
+    setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
 
     if (client == 0) {
         return;
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 45dd258..b54b41f 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -313,12 +313,19 @@
                         patch->sources[0].config_mask & AUDIO_PORT_CONFIG_FLAGS ?
                         patch->sources[0].flags.input : AUDIO_INPUT_FLAG_NONE;
                 audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
+                audio_source_t source = AUDIO_SOURCE_MIC;
+                // For telephony patches, propagate voice communication use case to record side
+                if (patch->num_sources == 2
+                        && patch->sources[1].ext.mix.usecase.stream
+                                == AUDIO_STREAM_VOICE_CALL) {
+                    source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+                }
                 sp<ThreadBase> thread = mAudioFlinger.openInput_l(srcModule,
                                                                     &input,
                                                                     &config,
                                                                     device,
                                                                     address,
-                                                                    AUDIO_SOURCE_MIC,
+                                                                    source,
                                                                     flags,
                                                                     outputDevice,
                                                                     outputDeviceAddress);
@@ -516,9 +523,14 @@
     audio_output_flags_t outputFlags = mAudioPatch.sinks[0].config_mask & AUDIO_PORT_CONFIG_FLAGS ?
             mAudioPatch.sinks[0].flags.output : AUDIO_OUTPUT_FLAG_NONE;
     audio_stream_type_t streamType = AUDIO_STREAM_PATCH;
+    audio_source_t source = AUDIO_SOURCE_DEFAULT;
     if (mAudioPatch.num_sources == 2 && mAudioPatch.sources[1].type == AUDIO_PORT_TYPE_MIX) {
         // "reuse one existing output mix" case
         streamType = mAudioPatch.sources[1].ext.mix.usecase.stream;
+        // For telephony patches, propagate voice communication use case to record side
+        if (streamType == AUDIO_STREAM_VOICE_CALL) {
+            source = AUDIO_SOURCE_VOICE_COMMUNICATION;
+        }
     }
     if (mPlayback.thread()->hasFastMixer()) {
         // Create a fast track if the playback thread has fast mixer to get better performance.
@@ -546,7 +558,8 @@
                                                  inChannelMask,
                                                  format,
                                                  frameCount,
-                                                 inputFlags);
+                                                 inputFlags,
+                                                 source);
     } else {
         // use a pseudo LCM between input and output framecount
         int playbackShift = __builtin_ctz(playbackFrameCount);
@@ -566,7 +579,9 @@
                                                  frameCount,
                                                  nullptr,
                                                  (size_t)0 /* bufferSize */,
-                                                 inputFlags);
+                                                 inputFlags,
+                                                 {} /* timeout */,
+                                                 source);
     }
     status = mRecord.checkTrack(tempRecordTrack.get());
     if (status != NO_ERROR) {
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index e8552c4..daec57e 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -87,6 +87,10 @@
                                     && (flags & AUDIO_INPUT_FLAG_HW_AV_SYNC) == 0;
                         }
 
+            using SinkMetadatas = std::vector<record_track_metadata_v7_t>;
+            using MetadataInserter = std::back_insert_iterator<SinkMetadatas>;
+            virtual void    copyMetadataTo(MetadataInserter& backInserter) const;
+
 private:
     friend class AudioFlinger;  // for mState
 
@@ -134,7 +138,8 @@
                 void *buffer,
                 size_t bufferSize,
                 audio_input_flags_t flags,
-                const Timeout& timeout = {});
+                const Timeout& timeout = {},
+                audio_source_t source = AUDIO_SOURCE_DEFAULT);
     virtual             ~PatchRecord();
 
     virtual Source* getSource() { return nullptr; }
@@ -166,7 +171,8 @@
                         audio_channel_mask_t channelMask,
                         audio_format_t format,
                         size_t frameCount,
-                        audio_input_flags_t flags);
+                        audio_input_flags_t flags,
+                        audio_source_t source = AUDIO_SOURCE_DEFAULT);
 
     Source* getSource() override { return static_cast<Source*>(this); }
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 104f238..73ee691 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -269,6 +269,23 @@
     return ss.str();
 }
 
+static std::string toString(audio_latency_mode_t mode) {
+    // We convert to the AIDL type to print (eventually the legacy type will be removed).
+    const auto result = legacy2aidl_audio_latency_mode_t_LatencyMode(mode);
+    return result.has_value() ? media::toString(*result) : "UNKNOWN";
+}
+
+// Could be made a template, but that would be ambiguous with other toString overloads for std::vector.
+static std::string toString(const std::vector<audio_latency_mode_t>& elements) {
+    std::string s("{ ");
+    for (const auto& e : elements) {
+        s.append(toString(e));
+        s.append(" ");
+    }
+    s.append("}");
+    return s;
+}
+
 static pthread_once_t sFastTrackMultiplierOnce = PTHREAD_ONCE_INIT;
 
 static void sFastTrackMultiplierInit()
@@ -7315,23 +7332,13 @@
         updateHalSupportedLatencyModes_l();
     }
 
-    // update priority if specified.
-    constexpr int32_t kRTPriorityMin = 1;
-    constexpr int32_t kRTPriorityMax = 3;
-    const int32_t priorityBoost =
-            property_get_int32("audio.spatializer.priority", kRTPriorityMin);
-    if (priorityBoost >= kRTPriorityMin && priorityBoost <= kRTPriorityMax) {
-        const pid_t pid = getpid();
-        const pid_t tid = getTid();
-
-        if (tid == -1) {
-            // Unusual: PlaybackThread::onFirstRef() should set the threadLoop running.
-            ALOGW("%s: audio.spatializer.priority %d ignored, thread not running",
-                    __func__, priorityBoost);
-        } else {
-            ALOGD("%s: audio.spatializer.priority %d, allowing real time for pid %d  tid %d",
-                    __func__, priorityBoost, pid, tid);
-            sendPrioConfigEvent_l(pid, tid, priorityBoost, false /*forApp*/);
+    const pid_t tid = getTid();
+    if (tid == -1) {
+        // Unusual: PlaybackThread::onFirstRef() should set the threadLoop running.
+        ALOGW("%s: Cannot update Spatializer mixer thread priority, not running", __func__);
+    } else {
+        const int priorityBoost = requestSpatializerPriority(getpid(), tid);
+        if (priorityBoost > 0) {
             stream()->setHalThreadPriority(priorityBoost);
         }
     }
@@ -7347,10 +7354,13 @@
 
 void AudioFlinger::SpatializerThread::updateHalSupportedLatencyModes_l() {
     std::vector<audio_latency_mode_t> latencyModes;
-    if (mOutput->stream->getRecommendedLatencyModes(&latencyModes) != NO_ERROR) {
+    const status_t status = mOutput->stream->getRecommendedLatencyModes(&latencyModes);
+    if (status != NO_ERROR) {
         latencyModes.clear();
     }
     if (latencyModes != mSupportedLatencyModes) {
+        ALOGD("%s: thread(%d) status %d supported latency modes: %s",
+            __func__, mId, status, toString(latencyModes).c_str());
         mSupportedLatencyModes.swap(latencyModes);
         sendHalLatencyModesChangedEvent_l();
     }
@@ -7390,6 +7400,8 @@
 
     if (latencyMode != mSetLatencyMode) {
         status_t status = mOutput->stream->setLatencyMode(latencyMode);
+        ALOGD("%s: thread(%d) setLatencyMode(%s) returned %d",
+                __func__, mId, toString(latencyMode).c_str(), status);
         if (status == NO_ERROR) {
             mSetLatencyMode = latencyMode;
         }
@@ -7471,6 +7483,8 @@
         std::vector<audio_latency_mode_t> modes) {
     Mutex::Autolock _l(mLock);
     if (modes != mSupportedLatencyModes) {
+        ALOGD("%s: thread(%d) supported latency modes: %s",
+            __func__, mId, toString(modes).c_str());
         mSupportedLatencyModes.swap(modes);
         sendHalLatencyModesChangedEvent_l();
     }
@@ -8773,21 +8787,9 @@
         return; // nothing to do
     }
     StreamInHalInterface::SinkMetadata metadata;
+    auto backInserter = std::back_inserter(metadata.tracks);
     for (const sp<RecordTrack> &track : mActiveTracks) {
-        // Do not forward PatchRecord metadata to audio HAL
-        if (track->isPatchTrack()) {
-            continue;
-        }
-        // No track is invalid as this is called after prepareTrack_l in the same critical section
-        record_track_metadata_v7_t trackMetadata;
-        trackMetadata.base = {
-                .source = track->attributes().source,
-                .gain = 1, // capture tracks do not have volumes
-        };
-        trackMetadata.channel_mask = track->channelMask(),
-        strncpy(trackMetadata.tags, track->attributes().tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
-
-        metadata.tracks.push_back(trackMetadata);
+        track->copyMetadataTo(backInserter);
     }
     mInput->stream->updateSinkMetadata(metadata);
 }
@@ -10243,19 +10245,22 @@
 
 void AudioFlinger::MmapThread::checkInvalidTracks_l()
 {
+    sp<MmapStreamCallback> callback;
     for (const sp<MmapTrack> &track : mActiveTracks) {
         if (track->isInvalid()) {
-            sp<MmapStreamCallback> callback = mCallback.promote();
-            if (callback != 0) {
-                mLock.unlock();
-                callback->onTearDown(track->portId());
-                mLock.lock();
-            } else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
-                ALOGW("Could not notify MMAP stream tear down: no onTearDown callback!");
+            callback = mCallback.promote();
+            if (callback == nullptr && mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
+                ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
                 mNoCallbackWarningCount++;
             }
+            break;
         }
     }
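+    // Notify once, outside the scan, so mLock is only released while invoking the callback.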
+    if (callback != 0) {
+        mLock.unlock();
+        callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+        mLock.lock();
+    }
 }
 
 void AudioFlinger::MmapThread::dumpInternals_l(int fd, const Vector<String16>& args __unused)
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index ac8909f..10f9f73 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -334,6 +334,7 @@
     : BnAudioTrack(),
       mTrack(track)
 {
+    setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
 }
 
 AudioFlinger::TrackHandle::~TrackHandle() {
@@ -1477,7 +1478,7 @@
         }
     }
 
-    metadata.channel_mask = mChannelMask,
+    metadata.channel_mask = mChannelMask;
     strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
     *backInserter++ = metadata;
 }
@@ -2021,7 +2022,6 @@
 {
     Buffer *pInBuffer;
     Buffer inBuffer;
-    bool outputBufferFull = false;
     inBuffer.frameCount = frames;
     inBuffer.raw = data;
 
@@ -2051,7 +2051,6 @@
                 ALOGV("%s(%d): thread %d no more output buffers; status %d",
                         __func__, mId,
                         (int)mThreadIoHandle, status);
-                outputBufferFull = true;
                 break;
             }
             uint32_t waitTimeMs = (uint32_t)ns2ms(systemTime() - startTime);
@@ -2331,6 +2330,7 @@
     : BnAudioRecord(),
     mRecordTrack(recordTrack)
 {
+    setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
 }
 
 AudioFlinger::RecordHandle::~RecordHandle() {
@@ -2747,6 +2747,25 @@
     }
 }
 
+void AudioFlinger::RecordThread::RecordTrack::copyMetadataTo(MetadataInserter& backInserter) const
+{
+    // Do not forward PatchRecord metadata with unspecified audio source
+    if (mAttr.source == AUDIO_SOURCE_DEFAULT) {
+        return;
+    }
+
+    // No track is invalid as this is called after prepareTrack_l in the same critical section
+    record_track_metadata_v7_t metadata;
+    metadata.base = {
+            .source = mAttr.source,
+            .gain = 1, // capture tracks do not have volumes
+    };
+    metadata.channel_mask = mChannelMask;
+    strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
+
+    *backInserter++ = metadata;
+}
 
 // ----------------------------------------------------------------------------
 #undef LOG_TAG
@@ -2760,9 +2779,10 @@
                                                      void *buffer,
                                                      size_t bufferSize,
                                                      audio_input_flags_t flags,
-                                                     const Timeout& timeout)
+                                                     const Timeout& timeout,
+                                                     audio_source_t source)
     :   RecordTrack(recordThread, NULL,
-                audio_attributes_t{} /* currently unused for patch track */,
+                audio_attributes_t{ .source = source },
                 sampleRate, format, channelMask, frameCount,
                 buffer, bufferSize, AUDIO_SESSION_NONE, getpid(),
                 audioServerAttributionSource(getpid()), flags, TYPE_PATCH),
@@ -2873,9 +2893,10 @@
         audio_channel_mask_t channelMask,
         audio_format_t format,
         size_t frameCount,
-        audio_input_flags_t flags)
+        audio_input_flags_t flags,
+        audio_source_t source)
         : PatchRecord(recordThread, sampleRate, channelMask, format, frameCount,
-                nullptr /*buffer*/, 0 /*bufferSize*/, flags),
+                nullptr /*buffer*/, 0 /*bufferSize*/, flags, {} /* timeout */, source),
           mPatchRecordAudioBufferProvider(*this),
           mSinkBuffer(allocAligned(32, mFrameCount * mFrameSize)),
           mStubBuffer(allocAligned(32, mFrameCount * mFrameSize))
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 0431619..7119b85 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -239,12 +239,13 @@
     }
     void setUseSwBridge() { mUseSwBridge = true; }
     bool useSwBridge() const { return mUseSwBridge; }
+    bool canCloseOutput() const { return mCloseOutput; }
     bool isConnected() const { return mPatchHandle != AUDIO_PATCH_HANDLE_NONE; }
     audio_patch_handle_t getPatchHandle() const { return mPatchHandle; }
     sp<DeviceDescriptor> srcDevice() const { return mSrcDevice; }
     sp<DeviceDescriptor> sinkDevice() const { return mSinkDevice; }
     wp<SwAudioOutputDescriptor> swOutput() const { return mSwOutput; }
-    void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput);
+    void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput, bool closeOutput = false);
     wp<HwAudioOutputDescriptor> hwOutput() const { return mHwOutput; }
     void setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput);
 
@@ -258,6 +259,15 @@
     wp<SwAudioOutputDescriptor> mSwOutput;
     wp<HwAudioOutputDescriptor> mHwOutput;
     bool mUseSwBridge = false;
+    /**
+     * For either a HW bridge associated to a SwOutput for activity / volume, or a SW bridge
+     * also used for sample rendering / activity & volume, an existing playback thread may be
+     * reused (e.g. one not already opened at APM startup, or a Direct Output).
+     * If an already opened output is reused, the AudioFlinger patch must be updated once the
+     * output is no longer used, to refine the output device(s) information and ensure the
+     * correct behavior of AudioDeviceCallback.
+     */
+    bool mCloseOutput = false;
 };
 
 /**
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 713b0ac..8b6866e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -105,9 +105,11 @@
 {
 }
 
-void SourceClientDescriptor::setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput)
+void SourceClientDescriptor::setSwOutput(
+        const sp<SwAudioOutputDescriptor>& swOutput, bool closeOutput)
 {
     mSwOutput = swOutput;
+    mCloseOutput = closeOutput;
 }
 
 void SourceClientDescriptor::setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput)
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index f9415fe..4573382 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -788,7 +788,8 @@
     ALOGV("%s between source %s and sink %s", __func__,
             srcDevice->toString().c_str(), sinkDevice->toString().c_str());
     auto callTxSourceClientPortId = PolicyAudioPort::getNextUniqueId();
-    const audio_attributes_t aa = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
+    const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
+
     struct audio_port_config source = {};
     srcDevice->toAudioPortConfig(&source);
     mCallTxSourceClient = new InternalSourceClientDescriptor(
@@ -3008,6 +3009,10 @@
     status_t status = NO_ERROR;
     IVolumeCurves &curves = getVolumeCurves(attributes);
     VolumeSource vs = toVolumeSource(group);
+    // AUDIO_STREAM_BLUETOOTH_SCO is only used for volume control, so remap it
+    // to AUDIO_STREAM_VOICE_CALL to match the relevant playback activity
+    VolumeSource activityVs = (vs == toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false)) ?
+            toVolumeSource(AUDIO_STREAM_VOICE_CALL, false) : vs;
     product_strategy_t strategy = mEngine->getProductStrategyForAttributes(attributes);
 
     status = setVolumeCurveIndex(index, device, curves);
@@ -3046,7 +3051,8 @@
         if (curDevices.erase(AUDIO_DEVICE_OUT_SPEAKER_SAFE)) {
             curDevices.insert(AUDIO_DEVICE_OUT_SPEAKER);
         }
-        if (!(desc->isActive(vs) || isInCall())) {
+
+        if (!(desc->isActive(activityVs) || isInCallOrScreening())) {
             continue;
         }
         if (device != AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME &&
@@ -3080,7 +3086,7 @@
                 bool isPreempted = false;
                 bool isHigherPriority = productStrategy < strategy;
                 for (const auto &client : activeClients) {
-                    if (isHigherPriority && (client->volumeSource() != vs)) {
+                    if (isHigherPriority && (client->volumeSource() != activityVs)) {
                         ALOGV("%s: Strategy=%d (\nrequester:\n"
                               " group %d, volumeGroup=%d attributes=%s)\n"
                               " higher priority source active:\n"
@@ -3093,7 +3099,7 @@
                         break;
                     }
                     // However, continue for loop to ensure no higher prio clients running on output
-                    if (client->volumeSource() == vs) {
+                    if (client->volumeSource() == activityVs) {
                         applyVolume = true;
                     }
                 }
@@ -4534,7 +4540,7 @@
                 // In case of Hw bridge, it is a Work Around. The mixPort used is the one declared
                 // in config XML to reach the sink so that is can be declared as available.
                 audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-                sp<SwAudioOutputDescriptor> outputDesc = nullptr;
+                sp<SwAudioOutputDescriptor> outputDesc;
                 if (!sourceDesc->isInternal()) {
                     // take care of dynamic routing for SwOutput selection,
                     audio_attributes_t attributes = sourceDesc->attributes();
@@ -4563,7 +4569,8 @@
                         ALOGE("%s output is duplicated", __func__);
                         return INVALID_OPERATION;
                     }
-                    sourceDesc->setSwOutput(outputDesc);
+                    bool closeOutput = outputDesc->mDirectOpenCount != 0;
+                    sourceDesc->setSwOutput(outputDesc, closeOutput);
                 } else {
                     // Same for "raw patches" aka created from createAudioPatch API
                     SortedVector<audio_io_handle_t> outputs =
@@ -4582,7 +4589,7 @@
                               __func__, sinkDevice->toString().c_str());
                         return INVALID_OPERATION;
                     }
-                    sourceDesc->setSwOutput(outputDesc);
+                    sourceDesc->setSwOutput(outputDesc, /* closeOutput= */ false);
                 }
                 // create a software bridge in PatchPanel if:
                 // - source and sink devices are on different HW modules OR
@@ -4604,7 +4611,8 @@
                         audio_port_config srcMixPortConfig = {};
                         outputDesc->toAudioPortConfig(&srcMixPortConfig, nullptr);
                         // for volume control, we may need a valid stream
-                        srcMixPortConfig.ext.mix.usecase.stream = !sourceDesc->isInternal() ?
+                        srcMixPortConfig.ext.mix.usecase.stream =
+                            (!sourceDesc->isInternal() || isCallTxAudioSource(sourceDesc)) ?
                                     mEngine->getStreamTypeForAttributes(sourceDesc->attributes()) :
                                     AUDIO_STREAM_PATCH;
                         patchBuilder.addSource(srcMixPortConfig);
@@ -4711,17 +4719,29 @@
                 // releaseOutput has already called closeOutput in case of direct output
                 return NO_ERROR;
             }
-            if (!outputDesc->isActive() && !sourceDesc->useSwBridge()) {
-                resetOutputDevice(outputDesc);
-            } else {
-                // Reuse patch handle if still valid / do not force rerouting if still routed
-                patchHandle = outputDesc->getPatchHandle();
-                setOutputDevices(outputDesc,
-                                 getNewOutputDevices(outputDesc, true /*fromCache*/),
-                                 patchHandle == AUDIO_PATCH_HANDLE_NONE, /*force*/
-                                 0,
-                                 patchHandle == AUDIO_PATCH_HANDLE_NONE ? nullptr : &patchHandle);
-            }
+            patchHandle = outputDesc->getPatchHandle();
+            // When a SW bridge is released, the mixer used by this bridge releases its patch
+            // on the AudioFlinger side, so the mixer audio patch must be recreated.
+            // Reuse the patch handle to force AudioFlinger to remove the initial mixer patch
+            // and update the HAL patch handle (preventing leaks).
+            // When using a HW bridge, force reconsidering the device only if not reusing an
+            // existing output and there is no more activity on the output (forcing it to close).
+            bool force = sourceDesc->useSwBridge() ||
+                    (sourceDesc->canCloseOutput() && !outputDesc->isActive());
+            // The APM pattern is to always keep outputs opened / patches realized for reachable devices.
+            // Updating the device may yield NONE (empty); coupled with force, this releases the patch.
+            // Reconsider device only for cases:
+            //      1 / Active Output
+            //      2 / Inactive Output previously hosting HwBridge
+            //      3 / Inactive Output previously hosting SwBridge that can be closed.
+            bool updateDevice = outputDesc->isActive() || !sourceDesc->useSwBridge() ||
+                    sourceDesc->canCloseOutput();
+            setOutputDevices(outputDesc,
+                             updateDevice ? getNewOutputDevices(outputDesc, true /*fromCache*/) :
+                                            outputDesc->devices(),
+                             force,
+                             0,
+                             patchHandle == AUDIO_PATCH_HANDLE_NONE ? nullptr : &patchHandle);
         } else {
             return BAD_VALUE;
         }
@@ -6328,10 +6348,10 @@
     SortedVector<audio_io_handle_t> dstOutputs = getOutputsForDevices(newDevices, mOutputs);
 
     uint32_t maxLatency = 0;
-    bool invalidate = false;
+    std::vector<sp<SwAudioOutputDescriptor>> invalidatedOutputs;
     // take into account dynamic audio policies related changes: if a client is now associated
     // to a different policy mix than at creation time, invalidate corresponding stream
-    for (size_t i = 0; i < mPreviousOutputs.size() && !invalidate; i++) {
+    for (size_t i = 0; i < mPreviousOutputs.size(); i++) {
         const sp<SwAudioOutputDescriptor>& desc = mPreviousOutputs.valueAt(i);
         if (desc->isDuplicated()) {
             continue;
@@ -6347,16 +6367,15 @@
                 continue;
             }
             if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
-                invalidate = true;
-                if (desc->isStrategyActive(psId)) {
+                if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
                     maxLatency = desc->latency();
                 }
-                break;
+                invalidatedOutputs.push_back(desc);
             }
         }
     }
 
-    if (srcOutputs != dstOutputs || invalidate) {
+    if (srcOutputs != dstOutputs || !invalidatedOutputs.empty()) {
         // get maximum latency of all source outputs to determine the minimum mute time guaranteeing
         // audio from invalidated tracks will be rendered when unmuting
         for (audio_io_handle_t srcOut : srcOutputs) {
@@ -6367,8 +6386,7 @@
                 maxLatency = desc->latency();
             }
 
-            if (invalidate) continue;
-
+            bool invalidate = false;
             for (auto client : desc->clientsList(false /*activeOnly*/)) {
                 if (desc->isDuplicated() || !desc->mProfile->isDirectOutput()) {
                     // a client on a non direct outputs has necessarily a linear PCM format
@@ -6396,21 +6414,14 @@
                     }
                 }
             }
-        }
-
-        ALOGV_IF(!(srcOutputs.isEmpty() || dstOutputs.isEmpty()),
-              "%s: strategy %d, moving from output %s to output %s", __func__, psId,
-              std::to_string(srcOutputs[0]).c_str(),
-              std::to_string(dstOutputs[0]).c_str());
-        // mute strategy while moving tracks from one output to another
-        for (audio_io_handle_t srcOut : srcOutputs) {
-            sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
-            if (desc == nullptr) continue;
-
-            if (desc->isStrategyActive(psId)) {
-                setStrategyMute(psId, true, desc);
-                setStrategyMute(psId, false, desc, maxLatency * LATENCY_MUTE_FACTOR,
-                                newDevices.types());
+            // mute strategy while moving tracks from one output to another
+            if (invalidate) {
+                invalidatedOutputs.push_back(desc);
+                if (desc->isStrategyActive(psId)) {
+                    setStrategyMute(psId, true, desc);
+                    setStrategyMute(psId, false, desc, maxLatency * LATENCY_MUTE_FACTOR,
+                                    newDevices.types());
+                }
             }
             sp<SourceClientDescriptor> source = getSourceForAttributesOnOutput(srcOut, attr);
             if (source != nullptr && !isCallRxAudioSource(source) && !source->isInternal()) {
@@ -6418,19 +6429,21 @@
             }
         }
 
+        ALOGV_IF(!(srcOutputs.isEmpty() || dstOutputs.isEmpty()),
+              "%s: strategy %d, moving from output %s to output %s", __func__, psId,
+              std::to_string(srcOutputs[0]).c_str(),
+              std::to_string(dstOutputs[0]).c_str());
+
         // Move effects associated to this stream from previous output to new output
         if (followsSameRouting(attr, attributes_initializer(AUDIO_USAGE_MEDIA))) {
             selectOutputForMusicEffects();
         }
         // Move tracks associated to this stream (and linked) from previous output to new output
-        if (invalidate) {
+        if (!invalidatedOutputs.empty()) {
             for (auto stream :  mEngine->getStreamTypesForProductStrategy(psId)) {
                 mpClientInterface->invalidateStream(stream);
             }
-            for (audio_io_handle_t srcOut : srcOutputs) {
-                sp<SwAudioOutputDescriptor> desc = mPreviousOutputs.valueFor(srcOut);
-                if (desc == nullptr) continue;
-
+            for (sp<SwAudioOutputDescriptor> desc : invalidatedOutputs) {
                 desc->setTracksInvalidatedStatusByStrategy(psId);
             }
         }
@@ -7491,14 +7504,18 @@
     return is_state_in_call(state);
 }
 
-bool AudioPolicyManager::isCallAudioAccessible()
-{
+bool AudioPolicyManager::isCallAudioAccessible() const {
     audio_mode_t mode = mEngine->getPhoneState();
     return (mode == AUDIO_MODE_IN_CALL)
             || (mode == AUDIO_MODE_CALL_SCREEN)
             || (mode == AUDIO_MODE_CALL_REDIRECT);
 }
 
+bool AudioPolicyManager::isInCallOrScreening() const {
+    audio_mode_t mode = mEngine->getPhoneState();
+    return isStateInCall(mode) || mode == AUDIO_MODE_CALL_SCREEN;
+}
+
 void AudioPolicyManager::cleanUpForDevice(const sp<DeviceDescriptor>& deviceDesc)
 {
     for (ssize_t i = (ssize_t)mAudioSources.size() - 1; i >= 0; i--)  {
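[Sketch] The AudioPolicyManager.cpp hunks above replace the single invalidate flag with an invalidatedOutputs list, so muting and track invalidation are applied only to the source outputs that actually carry affected clients. A minimal standalone sketch of that collect-then-act pattern; all names below are hypothetical and not part of this patch:

    #include <iostream>
    #include <string>
    #include <vector>

    struct Output {
        std::string name;
        bool needsInvalidation;  // stand-in for "has a client whose format forces re-routing"
        bool strategyActive;     // stand-in for isStrategyActive(psId)
    };

    int main() {
        std::vector<Output> outputs = {{"deep_buffer", true, true}, {"direct_pcm", false, true}};

        // Pass 1: collect only the outputs that need invalidation, muting the active ones.
        std::vector<const Output*> invalidatedOutputs;
        for (const Output& out : outputs) {
            if (out.needsInvalidation) {
                invalidatedOutputs.push_back(&out);
                if (out.strategyActive) {
                    std::cout << "mute " << out.name << " while tracks move\n";
                }
            }
        }

        // Pass 2: act once on the collected set instead of a single global flag.
        for (const Output* out : invalidatedOutputs) {
            std::cout << "flag tracks on " << out->name << " as invalidated\n";
        }
        return 0;
    }
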
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 87e6974..a69e088 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -602,7 +602,9 @@
         // true if given state represents a device in a telephony or VoIP call
         virtual bool isStateInCall(int state) const;
         // true if playback to call TX or capture from call RX is possible
-        bool isCallAudioAccessible();
+        bool isCallAudioAccessible() const;
+        // true if device is in a telephony or VoIP call or call screening is active
+        bool isInCallOrScreening() const;
 
         // when a device is connected, checks if an open output can be routed
         // to this device. If none is open, tries to open one of the available outputs.
@@ -639,6 +641,10 @@
             return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
         }
 
+        bool isCallTxAudioSource(const sp<SourceClientDescriptor> &source) {
+            return mCallTxSourceClient != nullptr && source == mCallTxSourceClient;
+        }
+
         void connectTelephonyRxAudioSource();
 
         void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index dfdd351..70349c2 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -202,6 +202,7 @@
       mCaptureStateNotifier(false),
       mCreateAudioPolicyManager(createAudioPolicyManager),
       mDestroyAudioPolicyManager(destroyAudioPolicyManager) {
+      setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
 }
 
 void AudioPolicyService::loadAudioPolicyManager()
@@ -1814,12 +1815,14 @@
 void AudioPolicyService::SensorPrivacyPolicy::registerSelf() {
     SensorPrivacyManager spm;
     mSensorPrivacyEnabled = spm.isSensorPrivacyEnabled();
+    (void)spm.addToggleSensorPrivacyListener(this);
     spm.addSensorPrivacyListener(this);
 }
 
 void AudioPolicyService::SensorPrivacyPolicy::unregisterSelf() {
     SensorPrivacyManager spm;
     spm.removeSensorPrivacyListener(this);
+    spm.removeToggleSensorPrivacyListener(this);
 }
 
 bool AudioPolicyService::SensorPrivacyPolicy::isSensorPrivacyEnabled() {
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 9c989ce..2fe7b9e 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -34,6 +34,7 @@
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/MediaMetricsItem.h>
 #include <media/ShmemCompat.h>
+#include <mediautils/SchedulingPolicyService.h>
 #include <mediautils/ServiceUtilities.h>
 #include <utils/Thread.h>
 
@@ -111,6 +112,14 @@
     };
 
     void onMessageReceived(const sp<AMessage> &msg) override {
+        // No ALooper method to get the tid so update
+        // Spatializer priority on the first message received.
+        std::call_once(mPrioritySetFlag, [](){
+            const pid_t pid = getpid();
+            const pid_t tid = gettid();
+            (void)requestSpatializerPriority(pid, tid);
+        });
+
         sp<Spatializer> spatializer = mSpatializer.promote();
         if (spatializer == nullptr) {
             ALOGW("%s: Cannot promote spatializer", __func__);
@@ -163,6 +172,7 @@
     }
 private:
     wp<Spatializer> mSpatializer;
+    std::once_flag mPrioritySetFlag;
 };
 
 const std::vector<const char *> Spatializer::sHeadPoseKeys = {
@@ -255,6 +265,7 @@
     : mEngineDescriptor(engineDescriptor),
       mPolicyCallback(callback) {
     ALOGV("%s", __func__);
+    setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
 }
 
 void Spatializer::onFirstRef() {
@@ -263,7 +274,7 @@
     mLooper->start(
             /*runOnCallingThread*/false,
             /*canCallJava*/       false,
-            PRIORITY_AUDIO);
+            PRIORITY_URGENT_AUDIO);
 
     mHandler = new EngineCallbackHandler(this);
     mLooper->registerHandler(mHandler);
@@ -741,6 +752,17 @@
     msg->post();
 }
 
+void Spatializer::resetEngineHeadPose_l() {
+    ALOGV("%s mEngine %p", __func__, mEngine.get());
+    if (mEngine == nullptr) {
+        return;
+    }
+    const std::vector<float> headToStage(6, 0.0);
+    setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
+    setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+            std::vector<SpatializerHeadTrackingMode>{SpatializerHeadTrackingMode::DISABLED});
+}
+
 void Spatializer::onHeadToStagePoseMsg(const std::vector<float>& headToStage) {
     ALOGV("%s", __func__);
     sp<media::ISpatializerHeadTrackingCallback> callback;
@@ -792,8 +814,12 @@
         }
         mActualHeadTrackingMode = spatializerMode;
         if (mEngine != nullptr) {
-            setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
-                                 std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+            if (spatializerMode == SpatializerHeadTrackingMode::DISABLED) {
+                resetEngineHeadPose_l();
+            } else {
+                setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+                                     std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+            }
         }
         callback = mHeadTrackingCallback;
         mLocalLog.log("%s: %s, spatializerMode %s", __func__, media::toString(mode).c_str(),
@@ -920,23 +946,38 @@
 
 void Spatializer::checkSensorsState_l() {
     audio_latency_mode_t requestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
-    bool lowLatencySupported = mSupportedLatencyModes.empty()
-            || (std::find(mSupportedLatencyModes.begin(), mSupportedLatencyModes.end(),
-                    AUDIO_LATENCY_MODE_LOW) != mSupportedLatencyModes.end());
-    if (mSupportsHeadTracking && mPoseController != nullptr) {
-        if (lowLatencySupported && mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
-            && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
-            && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
-            mPoseController->setHeadSensor(mHeadSensor);
-            mPoseController->setScreenSensor(mScreenSensor);
-            requestedLatencyMode = AUDIO_LATENCY_MODE_LOW;
+    const bool supportsSetLatencyMode = !mSupportedLatencyModes.empty();
+    const bool supportsLowLatencyMode = supportsSetLatencyMode && std::find(
+            mSupportedLatencyModes.begin(), mSupportedLatencyModes.end(),
+            AUDIO_LATENCY_MODE_LOW) != mSupportedLatencyModes.end();
+    if (mSupportsHeadTracking) {
+        if (mPoseController != nullptr) {
+            // TODO(b/253297301, b/255433067) reenable low latency condition check
+            // for Head Tracking after Bluetooth HAL supports it correctly.
+            if (mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
+                && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
+                && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
+                if (mEngine != nullptr) {
+                    setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+                            std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
+                }
+                mPoseController->setHeadSensor(mHeadSensor);
+                mPoseController->setScreenSensor(mScreenSensor);
+                if (supportsLowLatencyMode) requestedLatencyMode = AUDIO_LATENCY_MODE_LOW;
+            } else {
+                mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+                mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+                resetEngineHeadPose_l();
+            }
         } else {
-            mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
-            mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+            resetEngineHeadPose_l();
         }
     }
-    if (mOutput != AUDIO_IO_HANDLE_NONE) {
-        AudioSystem::setRequestedLatencyMode(mOutput, requestedLatencyMode);
+    if (mOutput != AUDIO_IO_HANDLE_NONE && supportsSetLatencyMode) {
+        const status_t status =
+                AudioSystem::setRequestedLatencyMode(mOutput, requestedLatencyMode);
+        ALOGD("%s: setRequestedLatencyMode for output thread(%d) to %s returned %d",
+                __func__, mOutput, toString(requestedLatencyMode).c_str(), status);
     }
 }
 
@@ -946,8 +987,6 @@
             mEngine->setEnabled(true);
             setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
                     std::vector<SpatializationLevel>{mLevel});
-            setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
-                    std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
         } else {
             setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
                     std::vector<SpatializationLevel>{SpatializationLevel::NONE});
@@ -969,6 +1008,7 @@
         mPoseController->setDisplayOrientation(mDisplayOrientation);
     } else if (!isControllerNeeded && mPoseController != nullptr) {
         mPoseController.reset();
+        resetEngineHeadPose_l();
     }
     if (mPoseController != nullptr) {
         mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 2687b5c..7415b1e 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -165,14 +165,10 @@
     std::string toString(unsigned level) const NO_THREAD_SAFETY_ANALYSIS;
 
     static std::string toString(audio_latency_mode_t mode) {
-        switch (mode) {
-            case AUDIO_LATENCY_MODE_FREE:
-                return "LATENCY_MODE_FREE";
-            case AUDIO_LATENCY_MODE_LOW:
-                return "LATENCY_MODE_LOW";
-        }
-        return "EnumNotImplemented";
-    };
+        // We convert to the AIDL type to print (eventually the legacy type will be removed).
+        const auto result = legacy2aidl_audio_latency_mode_t_LatencyMode(mode);
+        return result.has_value() ? media::toString(*result) : "unknown_latency_mode";
+    }
 
     /**
      * Format head to stage vector to a string, [0.00, 0.00, 0.00, -1.29, -0.50, 15.27].
@@ -350,6 +346,12 @@
      */
     void checkEngineState_l() REQUIRES(mLock);
 
+    /**
+     * Reset head tracking mode and recenter pose in engine. Called when head tracking
+     * is disabled.
+     */
+    void resetEngineHeadPose_l() REQUIRES(mLock);
+
     /** Effect engine descriptor */
     const effect_descriptor_t mEngineDescriptor;
     /** Callback interface to parent audio policy service */
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index a78112d..3f8a031 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -203,6 +203,7 @@
     status_t res;
 
     std::vector<std::string> deviceIds;
+    std::unordered_map<std::string, std::set<std::string>> unavailPhysicalIds;
     {
         Mutex::Autolock l(mServiceLock);
 
@@ -233,7 +234,7 @@
             ALOGE("Failed to enumerate flash units: %s (%d)", strerror(-res), res);
         }
 
-        deviceIds = mCameraProviderManager->getCameraDeviceIds();
+        deviceIds = mCameraProviderManager->getCameraDeviceIds(&unavailPhysicalIds);
     }
 
 
@@ -242,6 +243,12 @@
         if (getCameraState(id8) == nullptr) {
             onDeviceStatusChanged(id8, CameraDeviceStatus::PRESENT);
         }
+        if (unavailPhysicalIds.count(cameraId) > 0) {
+            for (const auto& physicalId : unavailPhysicalIds[cameraId]) {
+                String8 physicalId8 = String8(physicalId.c_str());
+                onDeviceStatusChanged(id8, physicalId8, CameraDeviceStatus::NOT_PRESENT);
+            }
+        }
     }
 
     // Derive primary rear/front cameras, and filter their characteristics.
@@ -335,7 +342,9 @@
         int facing = -1;
         int orientation = 0;
         String8 cameraId8(cameraId.c_str());
-        getDeviceVersion(cameraId8, /*out*/&facing, /*out*/&orientation);
+        int portraitRotation;
+        getDeviceVersion(cameraId8, /*overrideToPortrait*/false, /*out*/&portraitRotation,
+                /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.c_str());
             return;
@@ -495,7 +504,7 @@
 
     if (state == nullptr) {
         ALOGE("%s: Physical camera id %s status change on a non-present ID %s",
-                __FUNCTION__, id.string(), physicalId.string());
+                __FUNCTION__, physicalId.string(), id.string());
         return;
     }
 
@@ -675,7 +684,7 @@
     return Status::ok();
 }
 
-Status CameraService::getCameraInfo(int cameraId,
+Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait,
         CameraInfo* cameraInfo) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
@@ -703,8 +712,9 @@
     }
 
     Status ret = Status::ok();
+    int portraitRotation;
     status_t err = mCameraProviderManager->getCameraInfo(
-            cameraIdStr.c_str(), cameraInfo);
+            cameraIdStr.c_str(), overrideToPortrait, &portraitRotation, cameraInfo);
     if (err != OK) {
         ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                 "Error retrieving camera info from device %d: %s (%d)", cameraId,
@@ -739,7 +749,7 @@
 }
 
 Status CameraService::getCameraCharacteristics(const String16& cameraId,
-        int targetSdkVersion, CameraMetadata* cameraInfo) {
+        int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) {
     ATRACE_CALL();
     if (!cameraInfo) {
         ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
@@ -766,7 +776,7 @@
             SessionConfigurationUtils::targetPerfClassPrimaryCamera(mPerfClassPrimaryCameraIds,
                     cameraIdStr, targetSdkVersion);
     status_t res = mCameraProviderManager->getCameraCharacteristics(
-            cameraIdStr, overrideForPerfClass, cameraInfo);
+            cameraIdStr, overrideForPerfClass, cameraInfo, overrideToPortrait);
     if (res != OK) {
         if (res == NAME_NOT_FOUND) {
             return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
@@ -887,8 +897,8 @@
     BasicClient::BasicClient::sCameraService = nullptr;
 }
 
-std::pair<int, IPCTransport> CameraService::getDeviceVersion(const String8& cameraId, int* facing,
-        int* orientation) {
+std::pair<int, IPCTransport> CameraService::getDeviceVersion(const String8& cameraId,
+        bool overrideToPortrait, int* portraitRotation, int* facing, int* orientation) {
     ATRACE_CALL();
 
     int deviceVersion = 0;
@@ -907,7 +917,8 @@
 
     hardware::CameraInfo info;
     if (facing) {
-        res = mCameraProviderManager->getCameraInfo(cameraId.string(), &info);
+        res = mCameraProviderManager->getCameraInfo(cameraId.string(), overrideToPortrait,
+                portraitRotation, &info);
         if (res != OK) {
             return std::make_pair(-1, IPCTransport::INVALID);
         }
@@ -942,7 +953,8 @@
         const std::optional<String16>& featureId,  const String8& cameraId,
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
-        apiLevel effectiveApiLevel, bool overrideForPerfClass, /*out*/sp<BasicClient>* client) {
+        apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
+        /*out*/sp<BasicClient>* client) {
     // For HIDL devices
     if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
         // Create CameraClient based on device version reported by the HAL.
@@ -975,13 +987,16 @@
         sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
         *client = new Camera2Client(cameraService, tmp, packageName, featureId,
                 cameraId, api1CameraId, facing, sensorOrientation, clientPid, clientUid,
-                servicePid, overrideForPerfClass);
+                servicePid, overrideForPerfClass, overrideToPortrait);
+        ALOGI("%s: Camera1 API (legacy), override to portrait %d", __FUNCTION__,
+                overrideToPortrait);
     } else { // Camera2 API route
         sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                 static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
         *client = new CameraDeviceClient(cameraService, tmp, packageName,
                 systemNativeClient, featureId, cameraId, facing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass);
+                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait);
+        ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
     }
     return Status::ok();
 }
@@ -1071,7 +1086,7 @@
             sp<ICameraClient>{nullptr}, id, cameraId,
             internalPackageName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
-            /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*out*/ tmp)
+            /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().string());
     }
@@ -1587,6 +1602,7 @@
         int clientUid,
         int clientPid,
         int targetSdkVersion,
+        bool overrideToPortrait,
         /*out*/
         sp<ICamera>* device) {
 
@@ -1597,7 +1613,8 @@
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
             clientPackageName,/*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
-            /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion, /*out*/client);
+            /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
+            overrideToPortrait, /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
@@ -1672,6 +1689,7 @@
         const String16& clientPackageName,
         const std::optional<String16>& clientFeatureId,
         int clientUid, int oomScoreOffset, int targetSdkVersion,
+        bool overrideToPortrait,
         /*out*/
         sp<hardware::camera2::ICameraDeviceUser>* device) {
 
@@ -1725,7 +1743,7 @@
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
             /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient,clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
-            targetSdkVersion, /*out*/client);
+            targetSdkVersion, overrideToPortrait, /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(id, callingPid, String8(clientPackageNameAdj), ret.toString8());
@@ -1787,7 +1805,7 @@
         int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
         const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
         apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
-        /*out*/sp<CLIENT>& device) {
+        bool overrideToPortrait, /*out*/sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
     bool isNonSystemNdk = false;
@@ -1886,8 +1904,10 @@
         // give flashlight a chance to close devices if necessary.
         mFlashlight->prepareDeviceOpen(cameraId);
 
+        int portraitRotation;
         auto deviceVersionAndTransport =
-                getDeviceVersion(cameraId, /*out*/&facing, /*out*/&orientation);
+                getDeviceVersion(cameraId, overrideToPortrait, /*out*/&portraitRotation,
+                        /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\"  facing", __FUNCTION__, cameraId.string());
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
@@ -1901,7 +1921,7 @@
                 clientFeatureId, cameraId, api1CameraId, facing, orientation,
                 clientPid, clientUid, getpid(),
                 deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
-                /*out*/&tmp)).isOk()) {
+                overrideToPortrait, /*out*/&tmp)).isOk()) {
             return ret;
         }
         client = static_cast<CLIENT*>(tmp.get());
@@ -1961,8 +1981,25 @@
         // Set rotate-and-crop override behavior
         if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
             client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
+        } else if (overrideToPortrait && portraitRotation != 0) {
+            uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
+            switch (portraitRotation) {
+                case 90:
+                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
+                    break;
+                case 180:
+                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_180;
+                    break;
+                case 270:
+                    rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_270;
+                    break;
+                default:
+                    ALOGE("Unexpected portrait rotation: %d", portraitRotation);
+                    break;
+            }
+            client->setRotateAndCropOverride(rotateAndCropMode);
         } else {
-          client->setRotateAndCropOverride(
+            client->setRotateAndCropOverride(
               CameraServiceProxyWrapper::getRotateAndCropOverride(
                   clientPackageName, facing, multiuser_get_user_id(clientUid)));
         }
@@ -2466,6 +2503,11 @@
 
     ATRACE_CALL();
 
+    {
+        Mutex::Autolock lock(mServiceLock);
+        mDeviceState = newState;
+    }
+
     mCameraProviderManager->notifyDeviceStateChange(newState);
 
     return Status::ok();
@@ -2499,12 +2541,12 @@
     for (auto& current : clients) {
         if (current != nullptr) {
             const auto basicClient = current->getValue();
-            if (basicClient.get() != nullptr) {
-              basicClient->setRotateAndCropOverride(
-                  CameraServiceProxyWrapper::getRotateAndCropOverride(
-                      basicClient->getPackageName(),
-                      basicClient->getCameraFacing(),
-                      multiuser_get_user_id(basicClient->getClientUid())));
+            if (basicClient.get() != nullptr && !basicClient->getOverrideToPortrait()) {
+                basicClient->setRotateAndCropOverride(
+                        CameraServiceProxyWrapper::getRotateAndCropOverride(
+                                basicClient->getPackageName(),
+                                basicClient->getCameraFacing(),
+                                multiuser_get_user_id(basicClient->getClientUid())));
             }
         }
     }
@@ -2776,7 +2818,8 @@
             return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
 
-    auto deviceVersionAndTransport = getDeviceVersion(id);
+    int portraitRotation;
+    auto deviceVersionAndTransport = getDeviceVersion(id, false, &portraitRotation);
     if (deviceVersionAndTransport.first == -1) {
         String8 msg = String8::format("Unknown camera ID %s", id.string());
         ALOGE("%s: %s", __FUNCTION__, msg.string());
@@ -3261,13 +3304,13 @@
         const String8& cameraIdStr,
         int api1CameraId, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
-        int servicePid) :
+        int servicePid, bool overrideToPortrait) :
         CameraService::BasicClient(cameraService,
                 IInterface::asBinder(cameraClient),
                 clientPackageName, systemNativeClient, clientFeatureId,
                 cameraIdStr, cameraFacing, sensorOrientation,
                 clientPid, clientUid,
-                servicePid),
+                servicePid, overrideToPortrait),
         mCameraId(api1CameraId)
 {
     int callingPid = CameraThreadState::getCallingPid();
@@ -3297,7 +3340,7 @@
         const String16& clientPackageName, bool nativeClient,
         const std::optional<String16>& clientFeatureId, const String8& cameraIdStr,
         int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
-        int servicePid):
+        int servicePid, bool overrideToPortrait):
         mDestructionStarted(false),
         mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
         mClientPackageName(clientPackageName), mSystemNativeClient(nativeClient),
@@ -3305,6 +3348,7 @@
         mClientPid(clientPid), mClientUid(clientUid),
         mServicePid(servicePid),
         mDisconnected(false), mUidIsTrusted(false),
+        mOverrideToPortrait(overrideToPortrait),
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
         mRemoteBinder(remoteCallback),
         mOpsActive(false),
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index f2d15ef..840e9b6 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -127,10 +127,10 @@
     // ICameraService
     virtual binder::Status     getNumberOfCameras(int32_t type, int32_t* numCameras);
 
-    virtual binder::Status     getCameraInfo(int cameraId,
-            hardware::CameraInfo* cameraInfo);
+    virtual binder::Status     getCameraInfo(int cameraId, bool overrideToPortrait,
+            hardware::CameraInfo* cameraInfo) override;
     virtual binder::Status     getCameraCharacteristics(const String16& cameraId,
-            int targetSdkVersion, CameraMetadata* cameraInfo);
+            int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) override;
     virtual binder::Status     getCameraVendorTagDescriptor(
             /*out*/
             hardware::camera2::params::VendorTagDescriptor* desc);
@@ -141,13 +141,14 @@
     virtual binder::Status     connect(const sp<hardware::ICameraClient>& cameraClient,
             int32_t cameraId, const String16& clientPackageName,
             int32_t clientUid, int clientPid, int targetSdkVersion,
+            bool overrideToPortrait,
             /*out*/
-            sp<hardware::ICamera>* device);
+            sp<hardware::ICamera>* device) override;
 
     virtual binder::Status     connectDevice(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb, const String16& cameraId,
             const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
-            int32_t clientUid, int scoreOffset, int targetSdkVersion,
+            int32_t clientUid, int scoreOffset, int targetSdkVersion, bool overrideToPortrait,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
 
@@ -243,8 +244,9 @@
 
     /////////////////////////////////////////////////////////////////////
     // CameraDeviceFactory functionality
-    std::pair<int, IPCTransport>    getDeviceVersion(const String8& cameraId, int* facing = nullptr,
-            int* orientation = nullptr);
+    std::pair<int, IPCTransport>    getDeviceVersion(const String8& cameraId,
+            bool overrideToPortrait, int* portraitRotation,
+            int* facing = nullptr, int* orientation = nullptr);
 
     /////////////////////////////////////////////////////////////////////
     // Methods to be used in CameraService class tests only
@@ -282,6 +284,10 @@
             return mRemoteBinder;
         }
 
+        bool getOverrideToPortrait() const {
+            return mOverrideToPortrait;
+        }
+
         // Disallows dumping over binder interface
         virtual status_t dump(int fd, const Vector<String16>& args);
         // Internal dump method to be called by CameraService
@@ -361,7 +367,8 @@
                 int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
-                int servicePid);
+                int servicePid,
+                bool overrideToPortrait);
 
         virtual ~BasicClient();
 
@@ -384,6 +391,7 @@
         const pid_t                     mServicePid;
         bool                            mDisconnected;
         bool                            mUidIsTrusted;
+        bool                            mOverrideToPortrait;
 
         mutable Mutex                   mAudioRestrictionLock;
         int32_t                         mAudioRestriction;
@@ -473,7 +481,8 @@
                 int sensorOrientation,
                 int clientPid,
                 uid_t clientUid,
-                int servicePid);
+                int servicePid,
+                bool overrideToPortrait);
         ~Client();
 
         // return our camera client
@@ -843,7 +852,7 @@
             int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
             const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
-            /*out*/sp<CLIENT>& device);
+            bool overrideToPortrait, /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
     Mutex               mServiceLock;
@@ -1258,7 +1267,7 @@
             const String8& cameraId, int api1CameraId, int facing, int sensorOrientation,
             int clientPid, uid_t clientUid, int servicePid,
             std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
-            bool overrideForPerfClass, /*out*/sp<BasicClient>* client);
+            bool overrideForPerfClass, bool overrideToPortrait, /*out*/sp<BasicClient>* client);
 
     status_t checkCameraAccess(const String16& opPackageName);
 
@@ -1355,6 +1364,9 @@
     // Guard mInjectionInternalCamId and mInjectionInitPending.
     Mutex mInjectionParametersLock;
 
+    // Track the folded/unfolded device state. 0 == UNFOLDED, 4 == FOLDED
+    int64_t mDeviceState;
+
     void updateTorchUidMapLocked(const String16& cameraId, int uid);
 };
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 20bf73d..0887ced 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -61,11 +61,13 @@
         int clientPid,
         uid_t clientUid,
         int servicePid,
-        bool overrideForPerfClass):
+        bool overrideForPerfClass,
+        bool overrideToPortrait):
         Camera2ClientBase(cameraService, cameraClient, clientPackageName,
                 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
-                clientUid, servicePid, overrideForPerfClass, /*legacyClient*/ true),
+                clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+                /*legacyClient*/ true),
         mParameters(api1CameraId, cameraFacing)
 {
     ATRACE_CALL();
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 8081efa..9c540a4 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -107,7 +107,8 @@
             int clientPid,
             uid_t clientUid,
             int servicePid,
-            bool overrideForPerfClass);
+            bool overrideForPerfClass,
+            bool overrideToPortrait);
 
     virtual ~Camera2Client();
 
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 2daacd1..74423e5 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -59,6 +59,8 @@
         m3aState.aeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
         m3aState.afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
         m3aState.awbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
+
+        mLastFocalLength = l.mParameters.params.getFloat(CameraParameters::KEY_FOCAL_LENGTH);
     }
 }
 
@@ -92,9 +94,32 @@
         client->notifyRequestId(mCurrentRequestId);
     }
 
+    processLensState(frame.mMetadata, client);
+
     return FrameProcessorBase::processSingleFrame(frame, device);
 }
 
+void FrameProcessor::processLensState(const CameraMetadata &frame,
+        const sp<Camera2Client> &client) {
+    ATRACE_CALL();
+    camera_metadata_ro_entry_t entry;
+
+    entry = frame.find(ANDROID_LENS_FOCAL_LENGTH);
+    if (entry.count == 0) {
+        return;
+    }
+
+    if (fabs(entry.data.f[0] - mLastFocalLength) > 0.001f) {
+        SharedParameters::Lock l(client->getParameters());
+        l.mParameters.params.setFloat(
+                CameraParameters::KEY_FOCAL_LENGTH,
+                entry.data.f[0]);
+        l.mParameters.paramsFlattened = l.mParameters.params.flatten();
+
+        mLastFocalLength = entry.data.f[0];
+    }
+}
+
 status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
         const sp<Camera2Client> &client) {
     status_t res = BAD_VALUE;
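[Sketch] FrameProcessor::processLensState() above only pushes a new KEY_FOCAL_LENGTH value when it differs from the cached one by more than a small tolerance. A self-contained sketch of that change-detection idiom; the helper name and epsilon below are illustrative only:

    #include <cmath>
    #include <cstdio>

    // Only propagate a new value when it differs beyond a small tolerance,
    // mirroring the fabs(...) > 0.001f comparison in the patch.
    bool updateIfChanged(float newValue, float* lastValue, float epsilon) {
        if (std::fabs(newValue - *lastValue) <= epsilon) return false;
        *lastValue = newValue;
        return true;
    }

    int main() {
        float lastFocalLength = 4.38f;
        std::printf("%d\n", updateIfChanged(4.38f, &lastFocalLength, 0.001f));  // 0: no change
        std::printf("%d\n", updateIfChanged(2.20f, &lastFocalLength, 0.001f));  // 1: lens switched
        return 0;
    }
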
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index bb985f6..6c8d221 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -57,6 +57,9 @@
     virtual bool processSingleFrame(CaptureResult &frame,
                                     const sp<FrameProducer> &device);
 
+    void processLensState(const CameraMetadata &frame,
+            const sp<Camera2Client> &client);
+
     status_t processFaceDetect(const CameraMetadata &frame,
             const sp<Camera2Client> &client);
 
@@ -110,6 +113,9 @@
     // Emit FaceDetection event to java if faces changed
     void callbackFaceDetection(const sp<Camera2Client>& client,
                                const camera_frame_metadata &metadata);
+
+    // Track most recent focal length sent by the camera device
+    float mLastFocalLength;
 };
 
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 15df981..39f8d00 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -66,7 +66,8 @@
         int sensorOrientation,
         int clientPid,
         uid_t clientUid,
-        int servicePid) :
+        int servicePid,
+        bool overrideToPortrait) :
     BasicClient(cameraService,
             IInterface::asBinder(remoteCallback),
             clientPackageName,
@@ -77,7 +78,8 @@
             sensorOrientation,
             clientPid,
             clientUid,
-            servicePid),
+            servicePid,
+            overrideToPortrait),
     mRemoteCallback(remoteCallback) {
     // We don't need it for API2 clients, but Camera2ClientBase requires it.
     (void) api1CameraId;
@@ -96,10 +98,11 @@
         int clientPid,
         uid_t clientUid,
         int servicePid,
-        bool overrideForPerfClass) :
+        bool overrideForPerfClass,
+        bool overrideToPortrait) :
     Camera2ClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
                 clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass),
+                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0),
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 45915ba..6bb64d6 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -58,7 +58,8 @@
             int sensorOrientation,
             int clientPid,
             uid_t clientUid,
-            int servicePid);
+            int servicePid,
+            bool overrideToPortrait);
 
     sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
 };
@@ -187,7 +188,8 @@
             int clientPid,
             uid_t clientUid,
             int servicePid,
-            bool overrideForPerfClass);
+            bool overrideForPerfClass,
+            bool overrideToPortrait);
     virtual ~CameraDeviceClient();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager,
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 9ea1093..8edb64a 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -56,7 +56,8 @@
                     IInterface::asBinder(remoteCallback),
                     // (v)ndk doesn't have offline session support
                     clientPackageName, /*overridePackageName*/false, clientFeatureId,
-                    cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid),
+                    cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid,
+                    /*overrideToPortrait*/false),
             mRemoteCallback(remoteCallback), mOfflineSession(session),
             mCompositeStreamMap(offlineCompositeStreamMap) {}
 
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 54cc27a..237ce5e 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -1161,11 +1161,13 @@
     inputFrame.fileFd = -1;
 
     // Fill in HEIC header
-    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
-    CameraBlob *blobHeader = (CameraBlob *)header;
     // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
-    blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
-    blobHeader->blobSizeBytes = fSize;
+    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
+    CameraBlob blobHeader = {
+        .blobId = static_cast<CameraBlobId>(0x00FE),
+        .blobSizeBytes = static_cast<int32_t>(fSize)
+    };
+    memcpy(header, &blobHeader, sizeof(CameraBlob));
 
     res = native_window_set_buffers_timestamp(mOutputSurface.get(), inputFrame.timestamp);
     if (res != OK) {
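[Sketch] The HeicCompositeStream.cpp hunk above stops writing the blob header through a CameraBlob* cast of the raw byte pointer and instead builds the header locally and memcpy()s it into place, avoiding misaligned, type-punned writes. A standalone sketch of the same pattern, using a hypothetical header layout:

    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct BlobHeader {        // hypothetical stand-in for CameraBlob
        int32_t blobId;
        int32_t blobSizeBytes;
    };

    // Build the header in a properly typed local and memcpy it into the raw buffer,
    // instead of writing through a cast of the byte pointer.
    void writeTrailingHeader(std::vector<uint8_t>& buffer, int32_t id, int32_t sizeBytes) {
        const BlobHeader header = {id, sizeBytes};
        uint8_t* dst = buffer.data() + buffer.size() - sizeof(BlobHeader);
        std::memcpy(dst, &header, sizeof(header));
    }

    int main() {
        std::vector<uint8_t> buffer(64, 0);
        writeTrailingHeader(buffer, 0x00FE, 1234);
        return 0;
    }
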
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 7d98a0b..ad24392 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -60,10 +60,11 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
+        bool overrideToPortrait,
         bool legacyClient):
         TClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
                 clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
-                clientUid, servicePid),
+                clientUid, servicePid, overrideToPortrait),
         mSharedCameraCallbacks(remoteCallback),
         mDeviceActive(false), mApi1CameraId(api1CameraId)
 {
@@ -117,12 +118,12 @@
         case IPCTransport::HIDL:
             mDevice =
                     new HidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            mLegacyClient);
+                            TClientBase::mOverrideToPortrait, mLegacyClient);
             break;
         case IPCTransport::AIDL:
             mDevice =
                     new AidlCamera3Device(TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            mLegacyClient);
+                            TClientBase::mOverrideToPortrait, mLegacyClient);
              break;
         default:
             ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index e51d25d..d2dcdb1 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -59,6 +59,7 @@
                       uid_t clientUid,
                       int servicePid,
                       bool overrideForPerfClass,
+                      bool overrideToPortrait,
                       bool legacyClient = false);
     virtual ~Camera2ClientBase();
 
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 6ef16b3..3132787 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -197,12 +197,17 @@
     return std::make_pair(systemCameraCount, publicCameraCount);
 }
 
-std::vector<std::string> CameraProviderManager::getCameraDeviceIds() const {
+std::vector<std::string> CameraProviderManager::getCameraDeviceIds(std::unordered_map<
+            std::string, std::set<std::string>>* unavailablePhysicalIds) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     std::vector<std::string> deviceIds;
     for (auto& provider : mProviders) {
         for (auto& id : provider->mUniqueCameraIds) {
             deviceIds.push_back(id);
+            if (unavailablePhysicalIds != nullptr &&
+                    provider->mUnavailablePhysicalCameras.count(id) > 0) {
+                (*unavailablePhysicalIds)[id] = provider->mUnavailablePhysicalCameras.at(id);
+            }
         }
     }
     return deviceIds;
@@ -318,13 +323,13 @@
 }
 
 status_t CameraProviderManager::getCameraInfo(const std::string &id,
-        hardware::CameraInfo* info) const {
+        bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) return NAME_NOT_FOUND;
 
-    return deviceInfo->getCameraInfo(info);
+    return deviceInfo->getCameraInfo(overrideToPortrait, portraitRotation, info);
 }
 
 status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
@@ -356,9 +361,11 @@
 }
 
 status_t CameraProviderManager::getCameraCharacteristics(const std::string &id,
-        bool overrideForPerfClass, CameraMetadata* characteristics) const {
+        bool overrideForPerfClass, CameraMetadata* characteristics,
+        bool overrideToPortrait) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
-    return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics);
+    return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics,
+            overrideToPortrait);
 }
 
 status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
@@ -839,9 +846,6 @@
 
 void CameraProviderManager::ProviderInfo::initializeProviderInfoCommon(
         const std::vector<std::string> &devices) {
-
-    sp<StatusListener> listener = mManager->getStatusListener();
-
     for (auto& device : devices) {
         std::string id;
         status_t res = addDevice(device, CameraDeviceStatus::PRESENT, &id);
@@ -856,38 +860,22 @@
             mProviderName.c_str(), mDevices.size());
 
     // Process cached status callbacks
-    std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus =
-            std::make_unique<std::vector<CameraStatusInfoT>>();
     {
         std::lock_guard<std::mutex> lock(mInitLock);
 
         for (auto& statusInfo : mCachedStatus) {
             std::string id, physicalId;
-            status_t res = OK;
             if (statusInfo.isPhysicalCameraStatus) {
-                res = physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
+                physicalCameraDeviceStatusChangeLocked(&id, &physicalId,
                     statusInfo.cameraId, statusInfo.physicalCameraId, statusInfo.status);
             } else {
-                res = cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
-            }
-            if (res == OK) {
-                cachedStatus->emplace_back(statusInfo.isPhysicalCameraStatus,
-                        id.c_str(), physicalId.c_str(), statusInfo.status);
+                cameraDeviceStatusChangeLocked(&id, statusInfo.cameraId, statusInfo.status);
             }
         }
         mCachedStatus.clear();
 
         mInitialized = true;
     }
-
-    // The cached status change callbacks cannot be fired directly from this
-    // function, due to same-thread deadlock trying to acquire mInterfaceMutex
-    // twice.
-    if (listener != nullptr) {
-        mInitialStatusCallbackFuture = std::async(std::launch::async,
-                &CameraProviderManager::ProviderInfo::notifyInitialStatusChange, this,
-                listener, std::move(cachedStatus));
-    }
 }
 
 CameraProviderManager::ProviderInfo::DeviceInfo* CameraProviderManager::findDeviceInfoLocked(
@@ -1957,6 +1945,7 @@
     for (auto it = mDevices.begin(); it != mDevices.end(); it++) {
         if ((*it)->mId == id) {
             mUniqueCameraIds.erase(id);
+            mUnavailablePhysicalCameras.erase(id);
             if ((*it)->isAPI1Compatible()) {
                 mUniqueAPI1CompatibleCameraIds.erase(std::remove(
                     mUniqueAPI1CompatibleCameraIds.begin(),
@@ -2027,7 +2016,9 @@
         dprintf(fd, "    Has a flash unit: %s\n",
                 device->hasFlashUnit() ? "true" : "false");
         hardware::CameraInfo info;
-        status_t res = device->getCameraInfo(&info);
+        int portraitRotation;
+        status_t res = device->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
+                &info);
         if (res != OK) {
             dprintf(fd, "   <Error reading camera info: %s (%d)>\n",
                     strerror(-res), res);
@@ -2037,7 +2028,8 @@
             dprintf(fd, "    Orientation: %d\n", info.orientation);
         }
         CameraMetadata info2;
-        res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2);
+        res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2,
+                /*overrideToPortrait*/true);
         if (res == INVALID_OPERATION) {
             dprintf(fd, "  API2 not directly supported\n");
         } else if (res != OK) {
@@ -2224,6 +2216,15 @@
         return BAD_VALUE;
     }
 
+    if (mUnavailablePhysicalCameras.count(cameraId) == 0) {
+        mUnavailablePhysicalCameras.emplace(cameraId, std::set<std::string>{});
+    }
+    if (newStatus != CameraDeviceStatus::PRESENT) {
+        mUnavailablePhysicalCameras[cameraId].insert(physicalCameraDeviceName);
+    } else {
+        mUnavailablePhysicalCameras[cameraId].erase(physicalCameraDeviceName);
+    }
+
     *id = cameraId;
     *physicalId = physicalCameraDeviceName.c_str();
     return OK;
@@ -2282,20 +2283,6 @@
     }
 }
 
-void CameraProviderManager::ProviderInfo::notifyInitialStatusChange(
-        sp<StatusListener> listener,
-        std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus) {
-    for (auto& statusInfo : *cachedStatus) {
-        if (statusInfo.isPhysicalCameraStatus) {
-            listener->onDeviceStatusChanged(String8(statusInfo.cameraId.c_str()),
-                    String8(statusInfo.physicalCameraId.c_str()), statusInfo.status);
-        } else {
-            listener->onDeviceStatusChanged(
-                    String8(statusInfo.cameraId.c_str()), statusInfo.status);
-        }
-    }
-}
-
 CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
         const metadata_vendor_id_t tagId, const std::string &id,
         uint16_t minorVersion,
@@ -2314,6 +2301,7 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraInfo(
+        bool overrideToPortrait, int *portraitRotation,
         hardware::CameraInfo *info) const {
     if (info == nullptr) return BAD_VALUE;
 
@@ -2344,6 +2332,17 @@
         return NAME_NOT_FOUND;
     }
 
+    if (overrideToPortrait && (info->orientation == 0 || info->orientation == 180)) {
+        *portraitRotation = 90;
+        if (info->facing == hardware::CAMERA_FACING_FRONT) {
+            info->orientation = (360 + info->orientation - 90) % 360;
+        } else {
+            info->orientation = (360 + info->orientation + 90) % 360;
+        }
+    } else {
+        *portraitRotation = 0;
+    }
+
     return OK;
 }
 bool CameraProviderManager::ProviderInfo::DeviceInfo3::isAPI1Compatible() const {
@@ -2369,7 +2368,7 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
-        bool overrideForPerfClass, CameraMetadata *characteristics) const {
+        bool overrideForPerfClass, CameraMetadata *characteristics, bool overrideToPortrait) {
     if (characteristics == nullptr) return BAD_VALUE;
 
     if (!overrideForPerfClass && mCameraCharNoPCOverride != nullptr) {
@@ -2378,6 +2377,35 @@
         *characteristics = mCameraCharacteristics;
     }
 
+    if (overrideToPortrait) {
+        const auto &lensFacingEntry = characteristics->find(ANDROID_LENS_FACING);
+        const auto &sensorOrientationEntry = characteristics->find(ANDROID_SENSOR_ORIENTATION);
+        if (lensFacingEntry.count > 0 && sensorOrientationEntry.count > 0) {
+            uint8_t lensFacing = lensFacingEntry.data.u8[0];
+            int32_t sensorOrientation = sensorOrientationEntry.data.i32[0];
+            int32_t newSensorOrientation = sensorOrientation;
+
+            if (sensorOrientation == 0 || sensorOrientation == 180) {
+                if (lensFacing == ANDROID_LENS_FACING_FRONT) {
+                    newSensorOrientation = (360 + sensorOrientation - 90) % 360;
+                } else if (lensFacing == ANDROID_LENS_FACING_BACK) {
+                    newSensorOrientation = (360 + sensorOrientation + 90) % 360;
+                }
+            }
+
+            if (newSensorOrientation != sensorOrientation) {
+                ALOGV("%s: Update ANDROID_SENSOR_ORIENTATION for lens facing %d "
+                        "from %d to %d", __FUNCTION__, lensFacing, sensorOrientation,
+                        newSensorOrientation);
+                characteristics->update(ANDROID_SENSOR_ORIENTATION, &newSensorOrientation, 1);
+            }
+        }
+
+        if (characteristics->exists(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS)) {
+            characteristics->erase(ANDROID_INFO_DEVICE_STATE_ORIENTATIONS);
+        }
+    }
+
     return OK;
 }
 
@@ -2645,9 +2673,6 @@
 }
 
 CameraProviderManager::ProviderInfo::~ProviderInfo() {
-    if (mInitialStatusCallbackFuture.valid()) {
-        mInitialStatusCallbackFuture.wait();
-    }
     // Destruction of ProviderInfo is only supposed to happen when the respective
     // CameraProvider interface dies, so do not unregister callbacks.
 }
@@ -2710,10 +2735,12 @@
 }
 
 status_t CameraProviderManager::getCameraCharacteristicsLocked(const std::string &id,
-        bool overrideForPerfClass, CameraMetadata* characteristics) const {
+        bool overrideForPerfClass, CameraMetadata* characteristics,
+        bool overrideToPortrait) const {
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo != nullptr) {
-        return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics);
+        return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics,
+                overrideToPortrait);
     }
 
     // Find hidden physical camera characteristics
@@ -2748,7 +2775,9 @@
         combo.push_back(deviceId);
 
         hardware::CameraInfo info;
-        status_t res = deviceInfo->getCameraInfo(&info);
+        int portraitRotation;
+        status_t res = deviceInfo->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
+                &info);
         if (res != OK) {
             ALOGE("%s: Error reading camera info: %s (%d)", __FUNCTION__, strerror(-res), res);
             continue;
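[Sketch] DeviceInfo3::getCameraInfo() and getCameraCharacteristics() above rotate a landscape-mounted sensor (orientation 0 or 180) by 90 degrees when overrideToPortrait is set, in opposite directions for front- and back-facing cameras. A small sketch of that arithmetic with worked values; the helper below is illustrative, not part of the patch:

    #include <cstdio>

    // Landscape-mounted sensors (0 or 180 degrees) get rotated 90 degrees toward
    // portrait; front and back cameras rotate in opposite directions.
    int portraitOrientation(int orientation, bool frontFacing) {
        if (orientation != 0 && orientation != 180) return orientation;  // already portrait-mounted
        return frontFacing ? (360 + orientation - 90) % 360
                           : (360 + orientation + 90) % 360;
    }

    int main() {
        std::printf("front   0 -> %d\n", portraitOrientation(0, true));     // 270
        std::printf("back    0 -> %d\n", portraitOrientation(0, false));    // 90
        std::printf("back  180 -> %d\n", portraitOrientation(180, false));  // 270
        return 0;
    }
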
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index d049aff..e8d9a37 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -23,7 +23,6 @@
 #include <set>
 #include <string>
 #include <mutex>
-#include <future>
 
 #include <camera/camera2/ConcurrentCamera.h>
 #include <camera/CameraParameters2.h>
@@ -220,7 +219,14 @@
      */
     std::pair<int, int> getCameraCount() const;
 
-    std::vector<std::string> getCameraDeviceIds() const;
+    /**
+     * Upon the function return, if unavailablePhysicalIds is not nullptr, it
+     * will contain all of the unavailable physical camera Ids represented in
+     * the form of:
+     * {[logicalCamera, {physicalCamera1, physicalCamera2, ...}], ...}.
+     */
+    std::vector<std::string> getCameraDeviceIds(std::unordered_map<
+            std::string, std::set<std::string>>* unavailablePhysicalIds = nullptr) const;
 
     /**
      * Retrieve the number of API1 compatible cameras; these are internal and
@@ -251,14 +257,15 @@
      * Return the old camera API camera info
      */
     status_t getCameraInfo(const std::string &id,
-            hardware::CameraInfo* info) const;
+            bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const;
 
     /**
      * Return API2 camera characteristics - returns NAME_NOT_FOUND if a device ID does
      * not have a v3 or newer HAL version.
      */
     status_t getCameraCharacteristics(const std::string &id,
-            bool overrideForPerfClass, CameraMetadata* characteristics) const;
+            bool overrideForPerfClass, CameraMetadata* characteristics,
+            bool overrideToPortrait) const;
 
     status_t isConcurrentSessionConfigurationSupported(
             const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
@@ -560,13 +567,16 @@
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
-            virtual status_t getCameraInfo(hardware::CameraInfo *info) const = 0;
+            virtual status_t getCameraInfo(bool overrideToPortrait,
+                    int *portraitRotation,
+                    hardware::CameraInfo *info) const = 0;
             virtual bool isAPI1Compatible() const = 0;
             virtual status_t dumpState(int fd) = 0;
             virtual status_t getCameraCharacteristics(bool overrideForPerfClass,
-                    CameraMetadata *characteristics) const {
+                    CameraMetadata *characteristics, bool overrideToPortrait) {
                 (void) overrideForPerfClass;
                 (void) characteristics;
+                (void) overrideToPortrait;
                 return INVALID_OPERATION;
             }
             virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
@@ -607,6 +617,7 @@
         };
         std::vector<std::unique_ptr<DeviceInfo>> mDevices;
         std::unordered_set<std::string> mUniqueCameraIds;
+        std::unordered_map<std::string, std::set<std::string>> mUnavailablePhysicalCameras;
         int mUniqueDeviceCount;
         std::vector<std::string> mUniqueAPI1CompatibleCameraIds;
         // The initial public camera IDs published by the camera provider.
@@ -622,12 +633,15 @@
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
-            virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
+            virtual status_t getCameraInfo(bool overrideToPortrait,
+                    int *portraitRotation,
+                    hardware::CameraInfo *info) const override;
             virtual bool isAPI1Compatible() const override;
             virtual status_t dumpState(int fd) = 0;
             virtual status_t getCameraCharacteristics(
                     bool overrideForPerfClass,
-                    CameraMetadata *characteristics) const override;
+                    CameraMetadata *characteristics,
+                    bool overrideToPortrait) override;
             virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
                     CameraMetadata *characteristics) const override;
             virtual status_t isSessionConfigurationSupported(
@@ -715,8 +729,6 @@
         std::vector<CameraStatusInfoT> mCachedStatus;
         // End of scope for mInitLock
 
-        std::future<void> mInitialStatusCallbackFuture;
-
         std::unique_ptr<ProviderInfo::DeviceInfo>
         virtual initializeDeviceInfo(
                 const std::string &name, const metadata_vendor_id_t tagId,
@@ -724,9 +736,6 @@
 
         virtual status_t reCacheConcurrentStreamingCameraIdsLocked() = 0;
 
-        void notifyInitialStatusChange(sp<StatusListener> listener,
-                std::unique_ptr<std::vector<CameraStatusInfoT>> cachedStatus);
-
         std::vector<std::unordered_set<std::string>> mConcurrentCameraIdCombinations;
 
         // Parse provider instance name for type and id
@@ -830,7 +839,7 @@
         const hardware::camera::common::V1_0::TorchModeStatus&);
 
     status_t getCameraCharacteristicsLocked(const std::string &id, bool overrideForPerfClass,
-            CameraMetadata* characteristics) const;
+            CameraMetadata* characteristics, bool overrideToPortrait) const;
     void filterLogicalCameraIdsLocked(std::vector<std::string>& deviceIds) const;
 
     status_t getSystemCameraKindLocked(const std::string& id, SystemCameraKind *kind) const;
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index ef68f28..2c035de 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -759,7 +759,8 @@
         bool overrideForPerfClass =
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                         perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
-        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo);
+        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
+                /*overrideToPortrait*/true);
         if (res != OK) {
             return res;
         }
@@ -767,7 +768,7 @@
                 [this](const String8 &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
                     mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
-                                                   &physicalDeviceInfo);
+                                                   &physicalDeviceInfo, /*overrideToPortrait*/true);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index d60565f..1df6ec4 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -919,7 +919,8 @@
         bool overrideForPerfClass =
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                         perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
-        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo);
+        res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
+                /*overrideToPortrait*/true);
         if (res != OK) {
             return res;
         }
@@ -927,7 +928,7 @@
                 [this](const String8 &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
                     mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
-                                                   &physicalDeviceInfo);
+                            &physicalDeviceInfo, /*overrideToPortrait*/true);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index ca70b88..f10d41b 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -73,7 +73,8 @@
 
 namespace android {
 
-Camera3Device::Camera3Device(const String8 &id, bool overrideForPerfClass, bool legacyClient):
+Camera3Device::Camera3Device(const String8 &id, bool overrideForPerfClass, bool overrideToPortrait,
+        bool legacyClient):
         mId(id),
         mLegacyClient(legacyClient),
         mOperatingMode(NO_MODE),
@@ -94,7 +95,8 @@
         mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID),
         mLastTemplateId(-1),
         mNeedFixupMonochromeTags(false),
-        mOverrideForPerfClass(overrideForPerfClass)
+        mOverrideForPerfClass(overrideForPerfClass),
+        mOverrideToPortrait(overrideToPortrait)
 {
     ATRACE_CALL();
     ALOGV("%s: Created device for camera %s", __FUNCTION__, mId.string());
@@ -166,7 +168,7 @@
     /** Start up request queue thread */
     mRequestThread = createNewRequestThread(
             this, mStatusTracker, mInterface, sessionParamKeys,
-            mUseHalBufManager, mSupportCameraMute);
+            mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait);
     res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
     if (res != OK) {
         SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -2879,7 +2881,8 @@
         sp<StatusTracker> statusTracker,
         sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
         bool useHalBufManager,
-        bool supportCameraMute) :
+        bool supportCameraMute,
+        bool overrideToPortrait) :
         Thread(/*canCallJava*/false),
         mParent(parent),
         mStatusTracker(statusTracker),
@@ -2908,7 +2911,8 @@
         mSessionParamKeys(sessionParamKeys),
         mLatestSessionParams(sessionParamKeys.size()),
         mUseHalBufManager(useHalBufManager),
-        mSupportCameraMute(supportCameraMute){
+        mSupportCameraMute(supportCameraMute),
+        mOverrideToPortrait(overrideToPortrait) {
     mStatusId = statusTracker->addComponent("RequestThread");
 }
 
@@ -3577,9 +3581,9 @@
         mPrevTriggers = triggerCount;
 
         // Do not override rotate&crop for stream configurations that include
-        // SurfaceViews(HW_COMPOSER) output. The display rotation there will be
-        // compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
-        bool rotateAndCropChanged = mComposerOutput ? false :
+        // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
+        // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
+        bool rotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
             overrideAutoRotateAndCrop(captureRequest);
         bool testPatternChanged = overrideTestPattern(captureRequest);
 
@@ -4620,6 +4624,15 @@
         const sp<CaptureRequest> &request) {
     ATRACE_CALL();
 
+    if (mOverrideToPortrait) {
+        Mutex::Autolock l(mTriggerMutex);
+        uint8_t rotateAndCrop_u8 = mRotateAndCropOverride;
+        CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
+        metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
+                &rotateAndCrop_u8, 1);
+        return true;
+    }
+
     if (request->mRotateAndCropAuto) {
         Mutex::Autolock l(mTriggerMutex);
         CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index bcb7695..1a21c12 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -82,7 +82,8 @@
   friend class AidlCamera3Device;
   public:
 
-    explicit Camera3Device(const String8& id, bool overrideForPerfClass, bool legacyClient = false);
+    explicit Camera3Device(const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+            bool legacyClient = false);
 
     virtual ~Camera3Device();
     // Delete and optionally close native handles and clear the input vector afterward
@@ -788,7 +789,8 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute);
+                bool supportCameraMute,
+                bool overrideToPortrait);
         ~RequestThread();
 
         void     setNotificationListener(wp<NotificationListener> listener);
@@ -1065,6 +1067,7 @@
 
         const bool         mUseHalBufManager;
         const bool         mSupportCameraMute;
+        const bool         mOverrideToPortrait;
     };
 
     virtual sp<RequestThread> createNewRequestThread(wp<Camera3Device> /*parent*/,
@@ -1072,7 +1075,8 @@
                 sp<HalInterface> /*interface*/,
                 const Vector<int32_t>& /*sessionParamKeys*/,
                 bool /*useHalBufManager*/,
-                bool /*supportCameraMute*/) = 0;
+                bool /*supportCameraMute*/,
+                bool /*overrideToPortrait*/) = 0;
 
     sp<RequestThread> mRequestThread;
 
@@ -1342,6 +1346,10 @@
     // performance class.
     bool mOverrideForPerfClass;
 
+    // Whether the camera framework overrides the device characteristics to
+    // portrait orientation for app compatibility reasons.
+    bool mOverrideToPortrait;
+
     // The current minimum expected frame duration based on AE_TARGET_FPS_RANGE
     nsecs_t mMinExpectedDuration = 0;
     // Whether the camera device runs at fixed frame rate based on AE_MODE and
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 84b5aa4..396104c 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -1526,7 +1526,8 @@
                 vsyncTime.deadlineTimestamp >= currentTime &&
                 ((!cameraDisplayInSync && vsyncTime.expectedPresentationTime > minPresentT) ||
                  (cameraDisplayInSync && vsyncTime.expectedPresentationTime >
-                mLastPresentTime + minInterval + biasForShortDelay * kTimelineThresholdNs))) {
+                mLastPresentTime + minInterval +
+                    static_cast<nsecs_t>(biasForShortDelay * kTimelineThresholdNs)))) {
             expectedPresentT = vsyncTime.expectedPresentationTime;
             minDiff = std::abs(vsyncTime.expectedPresentationTime - idealPresentT);
         }
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index e16982b..6569395 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -521,7 +521,7 @@
         if (result->partial_result != 0)
             request.resultExtras.partialResultCount = result->partial_result;
 
-        if ((result->result != nullptr) && !states.legacyClient) {
+        if ((result->result != nullptr) && !states.legacyClient && !states.overrideToPortrait) {
             camera_metadata_ro_entry entry;
             auto ret = find_camera_metadata_ro_entry(result->result,
                     ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID, &entry);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 8c71c2b..019c8a8 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -107,6 +107,7 @@
         bool legacyClient;
         nsecs_t& minFrameDuration;
         bool& isFixedFps;
+        bool overrideToPortrait;
     };
 
     void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result);
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
index b3cb178..83caa00 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
@@ -69,7 +69,9 @@
     }
 
     // Cache the frame to match readout time interval, for up to kMaxFrameWaitTime
-    nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval;
+    // Because the code between here and queueBuffer() takes time to execute, keep the
+    // presentation interval slightly shorter than readoutInterval.
+    nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval - kFrameAdjustThreshold;
     nsecs_t frameWaitTime = std::min(kMaxFrameWaitTime, expectedQueueTime - currentTime);
     if (frameWaitTime > 0 && mPendingBuffers.size() < 2) {
         mBufferCond.waitRelative(mLock, frameWaitTime);
@@ -78,9 +80,9 @@
         }
         currentTime = systemTime();
     }
-    ALOGV("%s: readoutInterval %" PRId64 ", queueInterval %" PRId64 ", waited for %" PRId64
+    ALOGV("%s: readoutInterval %" PRId64 ", waited for %" PRId64
             ", timestamp %" PRId64, __FUNCTION__, readoutInterval,
-            currentTime - mLastCameraPresentTime, frameWaitTime, buffer.timestamp);
+            mPendingBuffers.size() < 2 ? frameWaitTime : 0, buffer.timestamp);
     mPendingBuffers.pop();
     queueBufferToClientLocked(buffer, currentTime);
     return true;
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
index cb9690c..f46de3d 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
@@ -86,6 +86,7 @@
    static constexpr nsecs_t kWaitDuration = 5000000LL; // 5ms
     static constexpr nsecs_t kFrameIntervalThreshold = 80000000LL; // 80ms
     static constexpr nsecs_t kMaxFrameWaitTime = 10000000LL; // 10ms
+    static constexpr nsecs_t kFrameAdjustThreshold = 2000000LL; // 2ms
 };
 
 }; //namespace camera3
diff --git a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
index a02e5f6..9cdd365 100644
--- a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
+++ b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
@@ -142,13 +142,13 @@
                    ch :                 // pillarbox or 1:1, full height
                    cw / mRotateAspect;  // letterbox, not full height
         switch (rotateMode) {
-            case ANDROID_SCALER_ROTATE_AND_CROP_90:
+            case ANDROID_SCALER_ROTATE_AND_CROP_270:
                 transformMat[1] = -rw / ch; // +y -> -x
                 transformMat[2] =  rh / cw; // +x -> +y
                 xShift = (cw + rw) / 2; // left edge of crop to right edge of rotated
                 yShift = (ch - rh) / 2; // top edge of crop to top edge of rotated
                 break;
-            case ANDROID_SCALER_ROTATE_AND_CROP_270:
+            case ANDROID_SCALER_ROTATE_AND_CROP_90:
                 transformMat[1] =  rw / ch; // +y -> +x
                 transformMat[2] = -rh / cw; // +x -> -y
                 xShift = (cw - rw) / 2; // left edge of crop to left edge of rotated
@@ -271,13 +271,13 @@
         rx = cx + (cw - rw) / 2;
         ry = cy + (ch - rh) / 2;
         switch (rotateMode) {
-            case ANDROID_SCALER_ROTATE_AND_CROP_90:
+            case ANDROID_SCALER_ROTATE_AND_CROP_270:
                 transformMat[1] =  ch / rw; // +y -> +x
                 transformMat[2] = -cw / rh; // +x -> -y
                 xShift = -(cw - rw) / 2; // left edge of rotated to left edge of cropped
                 yShift = ry - cy + ch;   // top edge of rotated to bottom edge of cropped
                 break;
-            case ANDROID_SCALER_ROTATE_AND_CROP_270:
+            case ANDROID_SCALER_ROTATE_AND_CROP_90:
                 transformMat[1] = -ch / rw; // +y -> -x
                 transformMat[2] =  cw / rh; // +x -> +y
                 xShift = (cw + rw) / 2; // left edge of rotated to left edge of cropped
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index ec28d31..1e103f2 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -163,8 +163,9 @@
 }
 
 AidlCamera3Device::AidlCamera3Device(const String8& id, bool overrideForPerfClass,
-            bool legacyClient) : Camera3Device(id, overrideForPerfClass, legacyClient) {
-        mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
+        bool overrideToPortrait, bool legacyClient) :
+        Camera3Device(id, overrideForPerfClass, overrideToPortrait, legacyClient) {
+    mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
 }
 
 status_t AidlCamera3Device::initialize(sp<CameraProviderManager> manager,
@@ -193,7 +194,8 @@
       SET_ERR("Session iface returned is null");
       return INVALID_OPERATION;
     }
-    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo);
+    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo,
+            mOverrideToPortrait);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
@@ -207,7 +209,8 @@
         for (auto& physicalId : physicalCameraIds) {
             // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
-                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId]);
+                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
+                    /*overrideToPortrait*/true);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -372,8 +375,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps},
-        mResultMetadataQueue
+        *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
+        mOverrideToPortrait}, mResultMetadataQueue
     };
 
     for (const auto& result : results) {
@@ -414,8 +417,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps},
-        mResultMetadataQueue
+        *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
+        mOverrideToPortrait}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
@@ -1408,9 +1411,10 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) :
+                bool supportCameraMute,
+                bool overrideToPortrait) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute) {}
+                  supportCameraMute, overrideToPortrait) {}
 
 status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
@@ -1579,9 +1583,10 @@
                 sp<Camera3Device::HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) {
+                bool supportCameraMute,
+                bool overrideToPortrait) {
     return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-            useHalBufManager, supportCameraMute);
+            useHalBufManager, supportCameraMute, overrideToPortrait);
 };
 
 sp<Camera3Device::Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index fd66661..630985f 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -39,7 +39,7 @@
     using AidlRequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
     class AidlCameraDeviceCallbacks;
     friend class AidlCameraDeviceCallbacks;
-    explicit AidlCamera3Device(const String8& id, bool overrideForPerfClass,
+    explicit AidlCamera3Device(const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
             bool legacyClient = false);
 
     virtual ~AidlCamera3Device() { }
@@ -174,7 +174,8 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute);
+                bool supportCameraMute,
+                bool overrideToPortrait);
 
         status_t switchToOffline(
                 const std::vector<int32_t>& streamsToKeep,
@@ -259,7 +260,8 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) override;
+                bool supportCameraMute,
+                bool overrideToPortrait) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
             createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index 8ff0b07..9ce0622 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -124,8 +124,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
-      mResultMetadataQueue
+        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -170,8 +170,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
-        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
-      mResultMetadataQueue
+        *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 9557692..44c60cf 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -162,7 +162,8 @@
         return res;
     }
 
-    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo);
+    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo,
+            mOverrideToPortrait);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
@@ -176,7 +177,8 @@
         for (auto& physicalId : physicalCameraIds) {
             // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
-                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId]);
+                    physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
+                    /*overrideToPortrait*/true);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -363,7 +365,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps}, mResultMetadataQueue
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait},
+        mResultMetadataQueue
     };
 
     //HidlCaptureOutputStates hidlStates {
@@ -425,7 +428,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps}, mResultMetadataQueue
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait},
+        mResultMetadataQueue
     };
 
     for (const auto& result : results) {
@@ -472,7 +476,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps}, mResultMetadataQueue
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait},
+        mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
@@ -698,9 +703,10 @@
                 sp<Camera3Device::HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) {
+                bool supportCameraMute,
+                bool overrideToPortrait) {
         return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-                useHalBufManager, supportCameraMute);
+                useHalBufManager, supportCameraMute, overrideToPortrait);
 };
 
 sp<Camera3Device::Camera3DeviceInjectionMethods>
@@ -1693,9 +1699,10 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) :
+                bool supportCameraMute,
+                bool overrideToPortrait) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute) {}
+                  supportCameraMute, overrideToPortrait) {}
 
 status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index 2e98fe0..2bd4660 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -31,8 +31,9 @@
             public Camera3Device {
   public:
 
-   explicit HidlCamera3Device(const String8& id, bool overrideForPerfClass,
-          bool legacyClient = false) : Camera3Device(id, overrideForPerfClass, legacyClient) { }
+   explicit HidlCamera3Device(const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+          bool legacyClient = false) : Camera3Device(id, overrideForPerfClass, overrideToPortrait,
+          legacyClient) { }
 
     virtual ~HidlCamera3Device() {}
 
@@ -175,7 +176,8 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute);
+                bool supportCameraMute,
+                bool overrideToPortrait);
 
         status_t switchToOffline(
                 const std::vector<int32_t>& streamsToKeep,
@@ -222,7 +224,8 @@
                 sp<HalInterface> interface,
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
-                bool supportCameraMute) override;
+                bool supportCameraMute,
+                bool overrideToPortrait) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
             createCamera3DeviceInjectionMethods(wp<Camera3Device>) override;
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index 2b4f8a1..c7f8fa1 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -105,8 +105,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
-      mResultMetadataQueue
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -146,8 +146,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
-      mResultMetadataQueue
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -182,8 +182,8 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps},
-      mResultMetadataQueue
+        mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
+        /*overrideToPortrait*/false}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 65a0300..259e8a5 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -65,7 +65,8 @@
     HStatus status = HStatus::NO_ERROR;
     binder::Status serviceRet =
         mAidlICameraService->getCameraCharacteristics(String16(cameraId.c_str()),
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraMetadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+                &cameraMetadata);
     HCameraMetadata hidlMetadata;
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -116,7 +117,8 @@
     binder::Status serviceRet = mAidlICameraService->connectDevice(
             callbacks, String16(cameraId.c_str()), String16(""), {},
             hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*out*/&deviceRemote);
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+            /*out*/&deviceRemote);
     HStatus status = HStatus::NO_ERROR;
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
index d3377f4..ae4d5dd 100644
--- a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
@@ -31,47 +31,48 @@
 std::map<int, std::vector<camera_metadata_tag>> static_api_level_to_keys{
       {30, {
           ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES,
+          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
           ANDROID_CONTROL_ZOOM_RATIO_RANGE,
           ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES,
-          ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
         } },
       {31, {
-          ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
-          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
-          ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
-          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
           ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
-          ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
+          ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
           ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
-          ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
+          ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+          ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
+          ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
+          ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
+          ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
           ANDROID_SENSOR_INFO_BINNING_FACTOR,
+          ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+          ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
         } },
       {32, {
           ANDROID_INFO_DEVICE_STATE_ORIENTATIONS,
         } },
       {33, {
-          ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
           ANDROID_AUTOMOTIVE_LENS_FACING,
           ANDROID_AUTOMOTIVE_LOCATION,
+          ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
+          ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
+          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
           ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
           ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
-          ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
-          ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
+          ANDROID_SENSOR_READOUT_TIMESTAMP,
         } },
 };
 
@@ -81,9 +82,9 @@
  */
 std::map<int, std::vector<camera_metadata_tag>> dynamic_api_level_to_keys{
       {30, {
+          ANDROID_CONTROL_EXTENDED_SCENE_MODE,
           ANDROID_CONTROL_ZOOM_RATIO,
           ANDROID_SCALER_ROTATE_AND_CROP,
-          ANDROID_CONTROL_EXTENDED_SCENE_MODE,
         }  },
       {31, {
           ANDROID_SENSOR_PIXEL_MODE,
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 97d7bf4..09f8eb6 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -229,11 +229,11 @@
     mCameraService->getCameraVendorTagCache(&cache);
 
     CameraInfo cameraInfo;
-    mCameraService->getCameraInfo(cameraId, &cameraInfo);
+    mCameraService->getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
 
     CameraMetadata metadata;
     mCameraService->getCameraCharacteristics(cameraIdStr,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, &metadata);
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
 }
 
 void CameraFuzzer::invokeCameraSound() {
@@ -320,7 +320,8 @@
 
         rc = mCameraService->connect(this, cameraId, String16(),
                 android::CameraService::USE_CALLING_UID, android::CameraService::USE_CALLING_PID,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &cameraDevice);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+                &cameraDevice);
         if (!rc.isOk()) {
             // camera not connected
             return;
@@ -534,7 +535,8 @@
         sp<hardware::camera2::ICameraDeviceUser> device;
         mCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, &device);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
+                &device);
         if (device == nullptr) {
             continue;
         }
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index e9f6979..2f55def 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -102,23 +102,57 @@
     sp<device::V3_2::ICameraDevice> mDeviceInterface;
     hardware::hidl_vec<common::V1_0::VendorTagSection> mVendorTagSections;
 
+    // Whether to call a physical camera unavailable callback upon setCallback
+    bool mHasPhysicalCameraUnavailableCallback;
+    hardware::hidl_string mLogicalCameraId;
+    hardware::hidl_string mUnavailablePhysicalCameraId;
+
     TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
             const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection) :
         mDeviceNames(devices),
         mDeviceInterface(new TestDeviceInterface(devices)),
-        mVendorTagSections (vendorSection) {}
+        mVendorTagSections (vendorSection),
+        mHasPhysicalCameraUnavailableCallback(false) {}
 
     TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
             const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection,
             android::hardware::hidl_vec<uint8_t> chars) :
         mDeviceNames(devices),
         mDeviceInterface(new TestDeviceInterface(devices, chars)),
-        mVendorTagSections (vendorSection) {}
+        mVendorTagSections (vendorSection),
+        mHasPhysicalCameraUnavailableCallback(false) {}
+
+    TestICameraProvider(const std::vector<hardware::hidl_string> &devices,
+            const hardware::hidl_vec<common::V1_0::VendorTagSection> &vendorSection,
+            android::hardware::hidl_vec<uint8_t> chars,
+            const hardware::hidl_string& logicalCameraId,
+            const hardware::hidl_string& unavailablePhysicalCameraId) :
+        mDeviceNames(devices),
+        mDeviceInterface(new TestDeviceInterface(devices, chars)),
+        mVendorTagSections (vendorSection),
+        mHasPhysicalCameraUnavailableCallback(true),
+        mLogicalCameraId(logicalCameraId),
+        mUnavailablePhysicalCameraId(unavailablePhysicalCameraId) {}
 
     virtual hardware::Return<Status> setCallback(
             const sp<provider::V2_4::ICameraProviderCallback>& callbacks) override {
         mCalledCounter[SET_CALLBACK]++;
         mCallbacks = callbacks;
+        if (mHasPhysicalCameraUnavailableCallback) {
+            auto cast26 = provider::V2_6::ICameraProviderCallback::castFrom(callbacks);
+            if (!cast26.isOk()) {
+                ADD_FAILURE() << "Failed to cast ICameraProviderCallback to V2_6";
+            } else {
+                sp<provider::V2_6::ICameraProviderCallback> callback26 = cast26;
+                if (callback26 == nullptr) {
+                    ADD_FAILURE() << "V2_6::ICameraProviderCallback is null after conversion";
+                } else {
+                    callback26->physicalCameraDeviceStatusChange(mLogicalCameraId,
+                            mUnavailablePhysicalCameraId,
+                            android::hardware::camera::common::V1_0::CameraDeviceStatus::NOT_PRESENT);
+                }
+            }
+        }
         return hardware::Return<Status>(Status::OK);
     }
 
@@ -266,12 +300,16 @@
 };
 
 struct TestStatusListener : public CameraProviderManager::StatusListener {
+    int mPhysicalCameraStatusChangeCount = 0;
+
     ~TestStatusListener() {}
 
     void onDeviceStatusChanged(const String8 &,
             CameraDeviceStatus) override {}
     void onDeviceStatusChanged(const String8 &, const String8 &,
-            CameraDeviceStatus) override {}
+            CameraDeviceStatus) override {
+        mPhysicalCameraStatusChangeCount++;
+    }
     void onTorchStatusChanged(const String8 &,
             TorchModeStatus) override {}
     void onTorchStatusChanged(const String8 &,
@@ -634,3 +672,46 @@
     ASSERT_EQ(deviceCount, deviceNames.size()) <<
             "Unexpected amount of camera devices";
 }
+
+// Test that CameraProviderManager does not trigger
+// onDeviceStatusChanged(NOT_PRESENT) for a physical camera before initialize()
+// returns.
+TEST(CameraProviderManagerTest, PhysicalCameraAvailabilityCallbackRaceTest) {
+    std::vector<hardware::hidl_string> deviceNames;
+    deviceNames.push_back("device@3.2/test/0");
+    hardware::hidl_vec<common::V1_0::VendorTagSection> vendorSection;
+
+    sp<CameraProviderManager> providerManager = new CameraProviderManager();
+    sp<TestStatusListener> statusListener = new TestStatusListener();
+    TestInteractionProxy serviceProxy;
+
+    android::hardware::hidl_vec<uint8_t> chars;
+    CameraMetadata meta;
+    int32_t charKeys[] = { ANDROID_REQUEST_AVAILABLE_CAPABILITIES };
+    meta.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, charKeys,
+            sizeof(charKeys) / sizeof(charKeys[0]));
+    uint8_t capabilities[] = { ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA };
+    meta.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, capabilities,
+            sizeof(capabilities)/sizeof(capabilities[0]));
+    uint8_t physicalCameraIds[] = { '2', '\0', '3', '\0' };
+    meta.update(ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS, physicalCameraIds,
+            sizeof(physicalCameraIds)/sizeof(physicalCameraIds[0]));
+    camera_metadata_t* metaBuffer = const_cast<camera_metadata_t*>(meta.getAndLock());
+    chars.setToExternal(reinterpret_cast<uint8_t*>(metaBuffer),
+            get_camera_metadata_size(metaBuffer));
+
+    sp<TestICameraProvider> provider = new TestICameraProvider(deviceNames,
+            vendorSection, chars, "device@3.2/test/0", "2");
+    serviceProxy.setProvider(provider);
+
+    status_t res = providerManager->initialize(statusListener, &serviceProxy);
+    ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
+
+    ASSERT_EQ(statusListener->mPhysicalCameraStatusChangeCount, 0)
+            << "Unexpected physical camera status change callback upon provider init.";
+
+    std::unordered_map<std::string, std::set<std::string>> unavailablePhysicalIds;
+    auto cameraIds = providerManager->getCameraDeviceIds(&unavailablePhysicalIds);
+    ASSERT_TRUE(unavailablePhysicalIds.count("0") > 0 && unavailablePhysicalIds["0"].count("2") > 0)
+        << "Unavailable physical camera Ids not set properly.";
+}
diff --git a/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp b/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp
index 3c187cd..9f86526 100644
--- a/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/RotateAndCropMapperTest.cpp
@@ -195,6 +195,7 @@
 
     // Round-trip results can't be exact since we've gone from a large int range -> small int range
     // and back, leading to quantization. For 4/3 aspect ratio, no more than +-1 error expected
+
     e = result.find(ANDROID_CONTROL_AE_REGIONS);
     EXPECT_EQUAL_WITHIN_N(full_region, e.data.i32, 1, "Round-tripped AE region isn't right");
 
@@ -209,11 +210,11 @@
     EXPECT_EQUAL_WITHIN_N(full_face, e.data.i32, 1, "App-side face rectangle isn't right");
 
     auto full_landmarks = std::vector<int32_t> {
-        full_crop[0], full_crop[1] + full_crop[3],
         full_crop[0] + full_crop[2], full_crop[1],
-        full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4,
+        full_crop[0], full_crop[1] + full_crop[3],
+        full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4,
         full_crop[0] + full_crop[2]/2, full_crop[1] + full_crop[3]/2,
-        full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4
+        full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4
     };
     e = result.find(ANDROID_STATISTICS_FACE_LANDMARKS);
     EXPECT_EQUAL_WITHIN_N(full_landmarks, e.data.i32, 1, "App-side face landmarks aren't right");
@@ -286,7 +287,6 @@
 
     // Round-trip results can't be exact since we've gone from a large int range -> small int range
     // and back, leading to quantization. For 4/3 aspect ratio, no more than +-1 error expected
-
     e = result.find(ANDROID_CONTROL_AE_REGIONS);
     EXPECT_EQUAL_WITHIN_N(full_region, e.data.i32, 1, "Round-tripped AE region isn't right");
 
@@ -301,11 +301,11 @@
     EXPECT_EQUAL_WITHIN_N(full_face, e.data.i32, 1, "App-side face rectangle isn't right");
 
     auto full_landmarks = std::vector<int32_t> {
-        full_crop[0] + full_crop[2], full_crop[1],
         full_crop[0], full_crop[1] + full_crop[3],
-        full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4,
+        full_crop[0] + full_crop[2], full_crop[1],
+        full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4,
         full_crop[0] + full_crop[2]/2, full_crop[1] + full_crop[3]/2,
-        full_crop[0] + full_crop[2]/4, full_crop[1] + 3*full_crop[3]/4
+        full_crop[0] + 3*full_crop[2]/4, full_crop[1] + full_crop[3]/4
     };
     e = result.find(ANDROID_STATISTICS_FACE_LANDMARKS);
     EXPECT_EQUAL_WITHIN_N(full_landmarks, e.data.i32, 1, "App-side face landmarks aren't right");
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 9f48f80..f4ee84f 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -435,7 +435,15 @@
             }
         }
         if (isIdle_l() && AudioClock::getNanoseconds() >= standbyTime) {
-            standby_l();
+            aaudio_result_t result = standby_l();
+            if (result != AAUDIO_OK) {
+                // If standby failed because the function is not implemented, there is
+                // no need to retry. Otherwise, retry standby later.
+                ALOGW("Failed to enter standby, error=%d", result);
+                standbyTime = result == AAUDIO_ERROR_UNIMPLEMENTED
+                        ? std::numeric_limits<int64_t>::max()
+                        : AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
+            }
         }
 
         if (command != nullptr) {
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index b2ba725..b5f8b90 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -320,7 +320,7 @@
     }
 
     virtual aaudio_result_t standby_l() REQUIRES(mLock) {
-        return AAUDIO_ERROR_UNAVAILABLE;
+        return AAUDIO_ERROR_UNIMPLEMENTED;
     }
     class ExitStandbyParam : public AAudioCommandParam {
     public: